Merge pull request #62 from krishkalaria12/main
Added claude 3.5 sonnet and perplexity with llama model to curated ai…
webdevcody authored Oct 17, 2024
2 parents cb10626 + d692164 commit 4f13660
Showing 6 changed files with 153 additions and 0 deletions.
2 changes: 2 additions & 0 deletions README.md
@@ -29,6 +29,8 @@ Add optional environment variable/s for simulating real AI models without mockup

- `npx convex env set GEMINI_API_KEY YOUR_API_KEY`
- `npx convex env set OPENAI_API_KEY YOUR_API_KEY`
- `npx convex env set ANTHROPIC_API_KEY YOUR_API_KEY`
- `npx convex env set PERPLEXITY_API_KEY YOUR_API_KEY`

also, you may need to run, but I think the initial setup does that.

8 changes: 8 additions & 0 deletions convex/constants.ts
@@ -7,6 +7,14 @@ export const AI_MODELS = [
model: "gpt-4o",
name: "OpenAI - 4o Mini",
},
{
model: "claude-3.5-sonnet",
name: "Claude 3.5 Sonnnet"
},
{
model: "perplexity-llama-3.1",
name: "Perplextity AI"
}
];

export const AI_MODEL_IDS = AI_MODELS.map((model) => model.model);
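For reference, AI_MODEL_IDS is derived directly from this list, so the two new ids become routable as soon as they are registered here. A minimal sketch of how a caller might guard an incoming id against that derived list (isKnownModelId is a hypothetical helper, not part of this commit):

// Sketch only, not part of this commit. Assumes the AI_MODELS shape shown above;
// isKnownModelId is a hypothetical helper used for illustration.
const AI_MODELS = [
  { model: "gpt-4o", name: "OpenAI - 4o Mini" },
  { model: "claude-3.5-sonnet", name: "Claude 3.5 Sonnet" },
  { model: "perplexity-llama-3.1", name: "Perplexity AI" },
];

const AI_MODEL_IDS = AI_MODELS.map((m) => m.model);

// Narrow an arbitrary string to one of the known model ids before dispatching.
function isKnownModelId(id: string): boolean {
  return AI_MODEL_IDS.includes(id);
}

console.log(isKnownModelId("claude-3.5-sonnet")); // true
console.log(isKnownModelId("claude-3-opus"));     // false
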
44 changes: 44 additions & 0 deletions models/claude-3-5-sonnet.ts
@@ -0,0 +1,44 @@
import { Anthropic } from "@anthropic-ai/sdk";
import { type ModelHandler } from ".";

export const claude35sonnet: ModelHandler = async (prompt, map) => {
  const anthropic = new Anthropic({
    apiKey: process.env.ANTHROPIC_API_KEY,
  });

  const response = await anthropic.messages.create({
    model: "claude-3-5-sonnet-20240620",
    max_tokens: 1024,
    temperature: 0,
    system: prompt,
    messages: [
      {
        role: "user",
        content: JSON.stringify(map),
      },
    ],
  });

  const content = response.content[0];

  if (content.type !== "text") {
    throw new Error("Unexpected response type from Claude");
  }

  const parsedResponse = JSON.parse(content.text);

  // Validate the response structure
  if (
    !Array.isArray(parsedResponse.boxCoordinates) ||
    !Array.isArray(parsedResponse.playerCoordinates) ||
    typeof parsedResponse.reasoning !== "string"
  ) {
    throw new Error("Invalid response structure");
  }

  return {
    boxCoordinates: parsedResponse.boxCoordinates,
    playerCoordinates: parsedResponse.playerCoordinates,
    reasoning: parsedResponse.reasoning,
  };
};
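
A minimal sketch of how this handler might be exercised on its own, assuming ANTHROPIC_API_KEY is set; the demo prompt and map below are invented for illustration and are not part of this commit:

// Sketch only, not part of this commit: a toy invocation of the handler above.
// Assumes the prompt instructs the model to return JSON with boxCoordinates,
// playerCoordinates, and reasoning; the map contents here are made up.
import { claude35sonnet } from "./claude-3-5-sonnet";

const demoPrompt =
  "Return JSON with boxCoordinates, playerCoordinates, and reasoning for the given map.";
const demoMap = [
  [" ", " ", "Z"],
  [" ", " ", " "],
  ["P", " ", " "],
];

claude35sonnet(demoPrompt, demoMap)
  .then((result) => console.log(result.reasoning, result.playerCoordinates))
  .catch((err) => console.error("Handler failed:", err));
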
10 changes: 10 additions & 0 deletions models/index.ts
@@ -1,5 +1,7 @@
import { gemini15pro } from "./gemini-1.5-pro";
import { gpt4o } from "./gpt-4o";
import { claude35sonnet } from "./claude-3-5-sonnet";
import { perplexityModel } from "./perplexity-llama";

export type ModelHandler = (
  prompt: string,
@@ -88,6 +90,14 @@ export async function runModel(
      result = await gpt4o(prompt, map);
      break;
    }
    case "claude-3.5-sonnet": {
      result = await claude35sonnet(prompt, map);
      break;
    }
    case "perplexity-llama-3.1": {
      result = await perplexityModel(prompt, map);
      break;
    }
    default: {
      throw new Error(`Tried running unknown model '${modelId}'`);
    }
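Both new cases lean on the shared ModelHandler contract. As a rough illustration (the full type is not visible in this hunk, so the return shape is inferred from the two handlers added in this PR), a stub handler could look like:

// Sketch only, not part of this commit: a stub handler illustrating the contract
// the new switch cases rely on. The return shape is inferred from the claude and
// perplexity handlers in this PR, not taken from the ModelHandler definition itself.
import { type ModelHandler } from ".";

export const stubModel: ModelHandler = async (prompt, map) => {
  // A real handler would call an LLM here; this just echoes a fixed answer.
  return {
    boxCoordinates: [],
    playerCoordinates: [0, 0],
    reasoning: `Stub response for a ${map.length}-row map and a ${prompt.length}-char prompt.`,
  };
};
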
87 changes: 87 additions & 0 deletions models/perplexity-llama.ts
@@ -0,0 +1,87 @@
import axios from 'axios';
import { z } from 'zod';
import { ModelHandler } from './index';

// Shape of the raw Perplexity chat-completions response envelope.
const PerplexityResponseSchema = z.object({
  id: z.string(),
  model: z.string(),
  object: z.string(),
  created: z.number(),
  choices: z.array(
    z.object({
      index: z.number(),
      finish_reason: z.string(),
      message: z.object({
        role: z.string(),
        content: z.string(),
      }),
      delta: z.object({
        role: z.string(),
        content: z.string(),
      }),
    })
  ),
  usage: z.object({
    prompt_tokens: z.number(),
    completion_tokens: z.number(),
    total_tokens: z.number(),
  }),
});

// Shape of the game-level JSON the model is expected to return in its message content.
const GameResponseSchema = z.object({
  reasoning: z.string(),
  playerCoordinates: z.array(z.number()),
  boxCoordinates: z.array(z.array(z.number())),
});

export const perplexityModel: ModelHandler = async (prompt: string, map: string[][]) => {
  const apiKey = process.env.PERPLEXITY_API_KEY;
  if (!apiKey) {
    throw new Error('PERPLEXITY_API_KEY is not set in the environment variables');
  }

  const messages = [
    { role: 'system', content: 'Be precise and concise.' },
    { role: 'user', content: prompt },
    { role: 'user', content: JSON.stringify(map) },
  ];

  const data = {
    model: 'llama-3.1-sonar-large-128k-online',
    messages,
    temperature: 0.2,
    top_p: 0.9,
    return_citations: true,
    search_domain_filter: ['perplexity.ai'],
    return_images: false,
    return_related_questions: false,
    search_recency_filter: 'month',
    top_k: 0,
    stream: false,
    presence_penalty: 0,
    frequency_penalty: 1,
  };

  try {
    const response = await axios.post('https://api.perplexity.ai/chat/completions', data, {
      headers: {
        'Authorization': `Bearer ${apiKey}`,
        'Content-Type': 'application/json',
      },
    });

    // Validate the transport envelope first, then the game-specific payload inside it.
    const validatedResponse = PerplexityResponseSchema.parse(response.data);
    const content = validatedResponse.choices[0].message.content;
    const parsedContent = JSON.parse(content);
    const gameResponse = GameResponseSchema.parse(parsedContent);

    return {
      boxCoordinates: gameResponse.boxCoordinates,
      playerCoordinates: gameResponse.playerCoordinates,
      reasoning: gameResponse.reasoning,
    };
  } catch (error) {
    console.error('Failed to run Perplexity model:', error);
    throw new Error('Failed to run Perplexity model');
  }
};
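
Since the handler validates twice, once for the HTTP envelope and once for the game payload, a small sketch of the second step in isolation shows the JSON shape the model is expected to produce; the sample payload values below are invented for illustration and are not part of this commit:

// Sketch only, not part of this commit: checks a hand-written payload against
// the same game-level schema the handler uses, to show the expected shape of
// the model's JSON answer.
import { z } from 'zod';

const GameResponseSchema = z.object({
  reasoning: z.string(),
  playerCoordinates: z.array(z.number()),
  boxCoordinates: z.array(z.array(z.number())),
});

const samplePayload = {
  reasoning: 'Move the player next to the closest box.',
  playerCoordinates: [2, 0],
  boxCoordinates: [[1, 1], [3, 4]],
};

const parsed = GameResponseSchema.safeParse(samplePayload);
console.log(parsed.success); // true when the payload matches the expected shape
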
2 changes: 2 additions & 0 deletions package.json
@@ -14,6 +14,7 @@
"lint": "next lint"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.29.1",
"@auth/core": "^0.34.2",
"@convex-dev/auth": "^0.0.71",
"@google/generative-ai": "^0.21.0",
@@ -25,6 +26,7 @@
"@radix-ui/react-toast": "^1.2.1",
"@radix-ui/react-toggle": "^1.1.0",
"@radix-ui/react-toggle-group": "^1.1.0",
"axios": "^1.7.7",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"convex": "^1.16.0",
