@@ -471,7 +471,7 @@
 " messages.append({\"role\": \"assistant\", \"content\": gpt})\n",
 " messages.append({\"role\": \"user\", \"content\": f\"[Sam]: {claude}.[Melissa]: {gemini}\"})\n",
 " \n",
-" print(f\"JHON[messages]: {messages} \")\n",
+" #print(f\"JHON[messages]: {messages} \")\n",
 " completion = openai.chat.completions.create(\n",
 " model=gpt_model,\n",
 " messages=messages\n",
@@ -514,7 +514,7 @@
 " messages.append({\"role\": \"user\", \"content\": f\"[Jhon]: {gpt}. [Melissa]: {ollama}\"})\n",
 " messages.append({\"role\": \"assistant\", \"content\": claude_message})\n",
 " messages.append({\"role\": \"user\", \"content\":f\"[Jhon]: {gpt_messages[-1]}\"})\n",
-" print(f\"SAM[messages]: {messages} \")\n",
+" #print(f\"SAM[messages]: {messages} \")\n",
 " message = claude.messages.create(\n",
 " model=claude_model,\n",
 " system=claude_system,\n",
@@ -544,7 +544,6 @@
 "# define context for ollama\n",
 "import ollama\n",
 "OLLAMA_API = \"http://localhost:11434/api/chat\"\n",
-"#HEADERS = {\"Content-Type\": \"application/json\"}\n",
 "OLLAMA_MODEL = \"llama3.2\"\n"
 ]
 },
@@ -563,7 +562,7 @@
 " messages.append({\"role\": \"assistant\", \"content\": ollama_message})\n",
 " messages.append({\"role\": \"user\", \"content\":f\"[Jhon]: {gpt_messages[-1]}. [Sam]: {claude_messages[-1]}\"})\n",
 "\n",
-" print(f\"MELISSA[messages]: {messages} \")\n",
+" #print(f\"MELISSA[messages]: {messages} \")\n",
 " \n",
 " \n",
 " payload = {\n",
@@ -596,21 +595,21 @@
 "claude_messages = [\"Hi to you leader.\"]\n",
 "ollama_messages = [\"All my respects to the top leader.\"]\n",
 "\n",
-"print(f\"GPT:\\n{gpt_messages[0]}\\n\")\n",
-"print(f\"Claude:\\n{claude_messages[0]}\\n\")\n",
-"print(f\"Ollama:\\n{ollama_messages[0]}\\n\")\n",
+"print(f\"GPT has JHON:\\n{gpt_messages[0]}\\n\")\n",
+"print(f\"Claude has SAM:\\n{claude_messages[0]}\\n\")\n",
+"print(f\"Ollama has MELISSA:\\n{ollama_messages[0]}\\n\")\n",
 "\n",
 "for i in range(5):\n",
 " gpt_next = call_gpt()\n",
-" print(f\"JHON:\\n{gpt_next}\\n\")\n",
+" print(f\"GPT has JHON:\\n{gpt_next}\\n\")\n",
 " gpt_messages.append(gpt_next)\n",
 " \n",
 " claude_next = call_claude()\n",
-" print(f\"SAM:\\n{claude_next}\\n\")\n",
+" print(f\"CLAUDE has SAM:\\n{claude_next}\\n\")\n",
 " claude_messages.append(claude_next)\n",
 "\n",
 " ollama_next = call_ollama()\n",
-" print(f\"MELISSA:\\n{ollama_next}\\n\")\n",
+" print(f\"OLLAMA has MELISSA:\\n{ollama_next}\\n\")\n",
 " ollama_messages.append(ollama_next)"
 ]
 },