@@ -436,10 +436,190 @@
     "    claude_messages.append(claude_next)"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "id": "392d2072-4e5d-424c-b4b4-098d7e3ead2d",
+   "metadata": {},
+   "source": [
+    "# And now for a 3 way convo including Gemini"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "eb0c319c-0226-4c6d-99bc-a9167fa86005",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Let's make a conversation between GPT-4o-mini, Claude-3-haiku and Gemini-1.5-flash\n",
+    "# We're using cheap versions of models so the costs will be minimal\n",
+    "\n",
+    "gpt_model = \"gpt-4o-mini\"\n",
+    "claude_model = \"claude-3-haiku-20240307\"\n",
+    "\n",
+    "gpt_system = \"You are a chatbot who is very argumentative; \\\n",
+    "you disagree with anything in the conversation and you challenge everything, in a snarky way.\"\n",
+    "\n",
+    "claude_system = \"You are a very polite, courteous chatbot. You try to agree with \\\n",
+    "everything the other people in the conversation say, or find common ground. If another person is argumentative, \\\n",
+    "you try to calm them down and keep chatting.\"\n",
+    "\n",
+    "gemini_system = \"You are an extremely knowledgeable and know-it-all counselor chatbot. You try to help resolve disagreements, \\\n",
+    "and if a person is either too argumentative or too polite, you cannot help but to use quotes from famous psychologists to teach \\\n",
+    "your students to be kind yet maintain boundaries.\"\n",
+    "\n",
+    "gemini_instance = google.generativeai.GenerativeModel(\n",
+    "    model_name='gemini-1.5-flash',\n",
+    "    system_instruction=gemini_system\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "041b1596-a646-4321-a89a-9a51046d9b72",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "gpt_messages = [\"Hi there\"]\n",
+    "claude_messages = [\"Hi\"]\n",
+    "gemini_messages = [\"How is everyone?\"]\n",
+    "gpt_name = \"Bob\"\n",
+    "claude_name = \"Larry\"\n",
+    "gemini_name = \"Frank\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fcd0a633-d506-4b68-8411-46f3fbe34752",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def construct_joined_user_msg(msg1, msg1_name, msg2, msg2_name):\n",
+    "    return msg1_name + ' said: ' + msg1 + '. \\n\\nThen ' + msg2_name + ' said: ' + msg2 + '.'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4aef8ba5-1d93-4473-8c5f-707a12d8a1cf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def call_gpt(return_msgs=False):\n",
+    "    messages = [{\"role\": \"system\", \"content\": gpt_system}]\n",
+    "    for gpt, claude, gemini in zip(gpt_messages, claude_messages, gemini_messages):\n",
+    "        messages.append({\"role\": \"assistant\", \"content\": gpt})\n",
+    "        messages.append({\"role\": \"user\", \"content\": construct_joined_user_msg(claude, claude_name, gemini, gemini_name)})\n",
+    "    if return_msgs: return messages\n",
+    "    completion = openai.chat.completions.create(\n",
+    "        model=gpt_model,\n",
+    "        messages=messages\n",
+    "    )\n",
+    "    return completion.choices[0].message.content"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "36365e7a-43b4-4a0d-a241-fff1440509d3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "call_gpt(return_msgs=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3ebebebe-8255-4ca5-9335-258f93e3181f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def call_claude(return_msgs=False):\n",
+    "    messages = []\n",
+    "    for gpt, claude_msg, gemini in zip(gpt_messages, claude_messages, gemini_messages):\n",
+    "        messages.append({\"role\": \"user\", \"content\": construct_joined_user_msg(gemini, gemini_name, gpt, gpt_name)})\n",
+    "        messages.append({\"role\": \"assistant\", \"content\": claude_msg})\n",
+    "    messages.append({\"role\": \"user\", \"content\": gpt_name + \" said \" + gpt_messages[-1]})\n",
+    "    if return_msgs: return messages\n",
+    "    message = claude.messages.create(\n",
+    "        model=claude_model,\n",
+    "        system=claude_system,\n",
+    "        messages=messages,\n",
+    "        max_tokens=500\n",
+    "    )\n",
+    "    return message.content[0].text"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b2da7997-a1fe-43fe-9970-4014f665e501",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "call_claude(return_msgs=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "94497894-8cb9-4da4-8671-edede74055f8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def call_gemini(return_msgs=False):\n",
+    "    messages = []\n",
+    "    for gpt, claude, gemini in zip(gpt_messages, claude_messages, gemini_messages):\n",
+    "        messages.append({\"role\": \"user\", \"parts\": construct_joined_user_msg(gpt, gpt_name, claude, claude_name)})\n",
+    "        messages.append({\"role\": \"model\", \"parts\": gemini})\n",
+    "    messages.append({\"role\": \"user\", \"parts\": construct_joined_user_msg(gpt_messages[-1], gpt_name, claude_messages[-1], claude_name)})\n",
+    "    if return_msgs: return messages\n",
+    "    message = gemini_instance.generate_content(messages)\n",
+    "    return message.text"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fddff0fc-0cc3-4473-9d55-effe445ef1ca",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "call_gemini(return_msgs=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1988abf3-1986-40f0-b804-c02b54472b8c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(f\"GPT:\\n{gpt_messages[0]}\\n\")\n",
+    "print(f\"Claude:\\n{claude_messages[0]}\\n\")\n",
+    "print(f\"Gemini:\\n{gemini_messages[0]}\\n\")\n",
+    "\n",
+    "for i in range(5):\n",
+    "    gpt_next = call_gpt()\n",
+    "    print(f\"GPT aka {gpt_name}:\\n{gpt_next}\\n\")\n",
+    "    gpt_messages.append(gpt_next)\n",
+    "    \n",
+    "    claude_next = call_claude()\n",
+    "    print(f\"Claude aka {claude_name}:\\n{claude_next}\\n\")\n",
+    "    claude_messages.append(claude_next)\n",
+    "\n",
+    "    gemini_next = call_gemini()\n",
+    "    print(f\"Gemini aka {gemini_name}:\\n{gemini_next}\\n\")\n",
+    "    gemini_messages.append(gemini_next)"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
-   "id": "2618c3fa-9b8e-4280-a070-d039361b8918",
+   "id": "b72906b3-8c4a-4c15-8508-01118d33782a",
    "metadata": {},
    "outputs": [],
    "source": []