From cf4472a110a6ff4241ca89f515f86bec2d4ef544 Mon Sep 17 00:00:00 2001
From: novellac <38117965+novellac@users.noreply.github.com>
Date: Sun, 19 Mar 2023 21:23:29 -0400
Subject: [PATCH] chore: update to gpt 3.5 turbo

---
 api/generate.js       | 24 ++++++++++++++++++------
 utils/OpenAIStream.js |  5 +++--
 2 files changed, 21 insertions(+), 8 deletions(-)

diff --git a/api/generate.js b/api/generate.js
index f7a0754..c2da44a 100644
--- a/api/generate.js
+++ b/api/generate.js
@@ -8,15 +8,27 @@ const handler = async (req) => {
   const { bio, jobDescription } = await req.json()

   const prompt = `
-    Given the current resume profile, ${bio}, generate an improved resume
-    profile based on the job description,
-    ${jobDescription}. If the resume profile mentions fewer years of experience than the
-    job description asks for, then only use the number of years of experience from the resume profile.
+    Given the current resume profile, ${bio},
+    generate an improved resume profile based
+    on the job description, ${jobDescription}.
+    If the resume profile mentions fewer years
+    of experience than the job description asks
+    for, then only use the number of years of
+    experience from the resume profile and do not
+    mention the number of years of experience from
+    the job description.
   `

+  if (!bio || !jobDescription) {
+    return new Response(
+      'No resume profile or no job description in the request',
+      { status: 400 }
+    )
+  }
+
   const payload = {
-    model: 'text-davinci-003',
-    prompt,
+    model: 'gpt-3.5-turbo',
+    messages: [{ role: 'user', content: prompt }],
     temperature: 0.7,
     top_p: 1,
     frequency_penalty: 0,
diff --git a/utils/OpenAIStream.js b/utils/OpenAIStream.js
index 7604b50..879a72a 100644
--- a/utils/OpenAIStream.js
+++ b/utils/OpenAIStream.js
@@ -6,7 +6,7 @@ export async function OpenAIStream(payload) {

   let counter = 0

-  const res = await fetch('https://api.openai.com/v1/completions', {
+  const res = await fetch('https://api.openai.com/v1/chat/completions', {
     headers: {
       'Content-Type': 'application/json',
       Authorization: `Bearer ${process.env.OPENAI_API_KEY ?? ''}`,
@@ -27,11 +27,12 @@ export async function OpenAIStream(payload) {
       }
       try {
         const json = JSON.parse(data)
-        const text = json.choices[0].text
+        const text = json.choices[0].delta?.content || ''
         if (counter < 2 && (text.match(/\n/) || []).length) {
           return
         }
         const queue = encoder.encode(text)
+        controller.enqueue(queue)
         counter++
       } catch (e) {
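
With the move from the /v1/completions endpoint to the streaming /v1/chat/completions endpoint, each server-sent event in the response body carries a chat.completion.chunk object whose incremental text sits under choices[0].delta.content rather than choices[0].text, which is what the updated parser in utils/OpenAIStream.js now reads. A minimal sketch of one such data: payload and the extraction it performs (the id, created, and model values are illustrative, not taken from a real response):

// Illustrative shape of a single streamed chunk from /v1/chat/completions;
// field values below are made up for the example.
const data =
  '{"id":"chatcmpl-abc123","object":"chat.completion.chunk","created":1679270400,' +
  '"model":"gpt-3.5-turbo-0301","choices":[{"delta":{"content":"Seasoned"},"index":0,"finish_reason":null}]}'

const json = JSON.parse(data)
// The first chunk of a stream typically carries only {"role":"assistant"} with no
// content field, so the optional chaining plus the '' fallback keeps it from throwing.
const text = json.choices[0].delta?.content || ''
console.log(text) // "Seasoned"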