5 changed files with 6622 additions and 1026 deletions
@ -1,3 +1,5 @@ |
|||||||
node_modules/ |
node_modules/ |
||||||
dist/ |
dist/ |
||||||
build/ |
build/ |
||||||
|
|
||||||
|
out/ |
@ -1,45 +1,48 @@ |
|||||||
const { OpenAI } = require("openai");

// fabric keeps its API credentials in a shared dotenv file under the
// user's config directory; load it before anything reads process.env.
const envPath = require("os").homedir() + "/.config/fabric/.env";
require("dotenv").config({ path: envPath });

// Module-level cache for the lazily created OpenAI client
// (populated on first call to getOpenAIClient).
let openAIClient = null;
||||||
/**
 * Lazily initializes and returns the shared OpenAI client.
 *
 * The client is created once and cached in the module-level
 * `openAIClient`; every subsequent call returns the same instance.
 *
 * @returns {OpenAI} The shared OpenAI client.
 * @throws {Error} If the OPENAI_API_KEY environment variable is missing or empty.
 */
function getOpenAIClient() {
  if (!process.env.OPENAI_API_KEY) {
    throw new Error(
      "The OPENAI_API_KEY environment variable is missing or empty."
    );
  }
  if (!openAIClient) {
    openAIClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
  }
  // Explicit semicolon: do not rely on ASI for the return statement.
  return openAIClient;
}
||||||
|
|
||||||
/**
 * Streams a chat completion from OpenAI for the given prompts.
 *
 * Invokes `callback` once per streamed content chunk. On failure the
 * error is logged to stderr and a user-facing error message is passed
 * to the callback instead of being rethrown.
 *
 * @param {string} system - System prompt for the conversation.
 * @param {string} user - User prompt for the conversation.
 * @param {(text: string) => void} callback - Receives each streamed chunk.
 */
async function queryOpenAI(system, user, callback) {
  const client = getOpenAIClient(); // Ensure the client is initialized here

  const chat = [
    { role: "system", content: system },
    { role: "user", content: user },
  ];

  try {
    const stream = await client.chat.completions.create({
      model: "gpt-4-1106-preview", // Adjust the model as necessary.
      messages: chat,
      temperature: 0.0,
      top_p: 1,
      frequency_penalty: 0.1,
      presence_penalty: 0.1,
      stream: true,
    });

    // Forward each streamed delta to the caller as it arrives.
    for await (const part of stream) {
      callback(part.choices[0]?.delta?.content || "");
    }
  } catch (error) {
    console.error("Error querying OpenAI:", error);
    callback("Error querying OpenAI. Please try again.");
  }
}
||||||
|
|
||||||
// Public API: only the streaming query helper is exported.
module.exports = { queryOpenAI };
|
File diff suppressed because it is too large
Load Diff
Loading…
Reference in new issue