diff --git a/README.md b/README.md
index 85e8af7..79f8ca6 100644
--- a/README.md
+++ b/README.md
@@ -29,6 +29,8 @@ Add optional environment variable/s for simulating real AI models without mockup
- `npx convex env set GEMINI_API_KEY YOUR_API_KEY`
- `npx convex env set OPENAI_API_KEY YOUR_API_KEY`
+- `npx convex env set ANTHROPIC_API_KEY YOUR_API_KEY`
+- `npx convex env set PERPLEXITY_API_KEY YOUR_API_KEY`
also, you may need to run, but I think the initial setup does that.
diff --git a/app/play/[level]/page.tsx b/app/play/[level]/page.tsx
index e1f8499..7b8431f 100644
--- a/app/play/[level]/page.tsx
+++ b/app/play/[level]/page.tsx
@@ -36,7 +36,31 @@ export default function PlayLevelPage({
}, [map]);
if (!map) {
-    return <div>Loading...</div>;
+    return (
+      <div className="container mx-auto flex min-h-screen flex-col items-center gap-8 py-12">
+        <div className="flex w-full items-center justify-between">
+          <Button asChild variant="outline">
+            <Link href="/play">Play Different Night</Link>
+          </Button>
+          {flags?.showTestPage && (
+            <ToggleGroup
+              type="single"
+              value={mode}
+              onValueChange={(value) => setMode(value as "play" | "test")}
+            >
+              <ToggleGroupItem value="play">Play</ToggleGroupItem>
+              <ToggleGroupItem value="test">Test AI</ToggleGroupItem>
+            </ToggleGroup>
+          )}
+        </div>
+        <h1 className="text-3xl font-bold">Night #{level}</h1>
+        <p>Loading...</p>
+      </div>
+    );
}
const handleRetryClicked = () => {
diff --git a/app/play/page.tsx b/app/play/page.tsx
index 2b2e21e..dcc04fa 100644
--- a/app/play/page.tsx
+++ b/app/play/page.tsx
@@ -12,12 +12,23 @@ import {
CardHeader,
CardTitle,
} from "@/components/ui/card";
+import { Skeleton } from "@/components/ui/skeleton";
export default function PlayPage() {
const maps = useQuery(api.maps.getMaps);
if (!maps) {
-    return <div>Loading...</div>;
+    return (
+      <div className="container mx-auto min-h-screen py-12 pb-24">
+        <h1 className="mb-8 text-center text-3xl font-bold">Choose a Night</h1>
+        <div className="grid grid-cols-1 gap-8 md:grid-cols-2 lg:grid-cols-3">
+          {Array.from({ length: 6 }).map((_, index) => (
+            <Skeleton key={index} className="h-48 w-full" />
+          ))}
+        </div>
+      </div>
+    );
}
return (
diff --git a/components/ui/skeleton.tsx b/components/ui/skeleton.tsx
new file mode 100644
index 0000000..d7e45f7
--- /dev/null
+++ b/components/ui/skeleton.tsx
@@ -0,0 +1,15 @@
+import { cn } from "@/lib/utils"
+
+function Skeleton({
+  className,
+  ...props
+}: React.HTMLAttributes<HTMLDivElement>) {
+  return (
+    <div
+      className={cn("animate-pulse rounded-md bg-muted", className)}
+      {...props}
+    />
+  )
+}
+
+export { Skeleton }
diff --git a/convex/constants.ts b/convex/constants.ts
index 849abb4..0d6ba7a 100644
--- a/convex/constants.ts
+++ b/convex/constants.ts
@@ -7,6 +7,14 @@ export const AI_MODELS = [
model: "gpt-4o",
name: "OpenAI - 4o Mini",
},
+  {
+    model: "claude-3.5-sonnet",
+    name: "Claude 3.5 Sonnet",
+  },
+  {
+    model: "perplexity-llama-3.1",
+    name: "Perplexity AI",
+  },
];
export const AI_MODEL_IDS = AI_MODELS.map((model) => model.model);
diff --git a/models/claude-3-5-sonnet.ts b/models/claude-3-5-sonnet.ts
new file mode 100644
index 0000000..c47ce9b
--- /dev/null
+++ b/models/claude-3-5-sonnet.ts
@@ -0,0 +1,44 @@
+import { Anthropic } from "@anthropic-ai/sdk";
+import { type ModelHandler } from ".";
+
+export const claude35sonnet: ModelHandler = async (prompt, map) => {
+ const anthropic = new Anthropic({
+ apiKey: process.env.ANTHROPIC_API_KEY,
+ });
+
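+  // Send the game prompt as the system message and the serialized map as the user message.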
+ const response = await anthropic.messages.create({
+ model: "claude-3-sonnet-20240307",
+ max_tokens: 1024,
+ temperature: 0,
+ system: prompt,
+ messages: [
+ {
+ role: "user",
+ content: JSON.stringify(map),
+ },
+ ],
+ });
+
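+  // Claude returns a list of content blocks; we expect a single text block containing the move.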
+ const content = response.content[0];
+
+ if (content.type !== "text") {
+ throw new Error("Unexpected response type from Claude");
+ }
+
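+  // The reply should be a JSON object with boxCoordinates, playerCoordinates, and reasoning.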
+ const parsedResponse = JSON.parse(content.text);
+
+ // Validate the response structure
+ if (
+ !Array.isArray(parsedResponse.boxCoordinates) ||
+ !Array.isArray(parsedResponse.playerCoordinates) ||
+ typeof parsedResponse.reasoning !== "string"
+ ) {
+ throw new Error("Invalid response structure");
+ }
+
+ return {
+ boxCoordinates: parsedResponse.boxCoordinates,
+ playerCoordinates: parsedResponse.playerCoordinates,
+ reasoning: parsedResponse.reasoning,
+ };
+};
diff --git a/models/index.ts b/models/index.ts
index 00daf65..6ee3d47 100644
--- a/models/index.ts
+++ b/models/index.ts
@@ -1,5 +1,7 @@
import { gemini15pro } from "./gemini-1.5-pro";
import { gpt4o } from "./gpt-4o";
+import { claude35sonnet } from "./claude-3-5-sonnet";
+import { perplexityModel } from "./perplexity-llama";
export type ModelHandler = (
prompt: string,
@@ -94,6 +96,14 @@ export async function runModel(
result = await gpt4o(prompt, map);
break;
}
+ case "claude-3.5-sonnet": {
+ result = await claude35sonnet(prompt, map);
+ break;
+ }
+ case "perplexity-llama-3.1": {
+ result = await perplexityModel(prompt, map);
+ break;
+ }
default: {
throw new Error(`Tried running unknown model '${modelId}'`);
}
diff --git a/models/perplexity-llama.ts b/models/perplexity-llama.ts
new file mode 100644
index 0000000..d3c59c2
--- /dev/null
+++ b/models/perplexity-llama.ts
@@ -0,0 +1,87 @@
+import axios from 'axios';
+import { z } from 'zod';
+import { ModelHandler } from './index';
+
+const PerplexityResponseSchema = z.object({
+ id: z.string(),
+ model: z.string(),
+ object: z.string(),
+ created: z.number(),
+ choices: z.array(
+ z.object({
+ index: z.number(),
+ finish_reason: z.string(),
+ message: z.object({
+ role: z.string(),
+ content: z.string(),
+ }),
+ delta: z.object({
+ role: z.string(),
+ content: z.string(),
+ }),
+ })
+ ),
+ usage: z.object({
+ prompt_tokens: z.number(),
+ completion_tokens: z.number(),
+ total_tokens: z.number(),
+ }),
+});
+
+const GameResponseSchema = z.object({
+ reasoning: z.string(),
+ playerCoordinates: z.array(z.number()),
+ boxCoordinates: z.array(z.array(z.number())),
+});
+
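+// Calls Perplexity's chat completions API with the game prompt and map, then
+// validates both the response envelope and the returned move JSON.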
+export const perplexityModel: ModelHandler = async (prompt: string, map: string[][]) => {
+ const apiKey = process.env.PERPLEXITY_API_KEY;
+ if (!apiKey) {
+ throw new Error('PERPLEXITY_API_KEY is not set in the environment variables');
+ }
+
+ const messages = [
+ { role: 'system', content: 'Be precise and concise.' },
+ { role: 'user', content: prompt },
+ { role: 'user', content: JSON.stringify(map) },
+ ];
+
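+  // Non-streaming request body for Perplexity's chat completions endpoint.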
+ const data = {
+ model: 'llama-3.1-sonar-large-128k-online',
+ messages,
+ temperature: 0.2,
+ top_p: 0.9,
+ return_citations: true,
+ search_domain_filter: ['perplexity.ai'],
+ return_images: false,
+ return_related_questions: false,
+ search_recency_filter: 'month',
+ top_k: 0,
+ stream: false,
+ presence_penalty: 0,
+ frequency_penalty: 1,
+ };
+
+ try {
+ const response = await axios.post('https://api.perplexity.ai/chat/completions', data, {
+ headers: {
+ 'Authorization': `Bearer ${apiKey}`,
+ 'Content-Type': 'application/json',
+ },
+ });
+
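+    // Validate the API envelope, then parse and validate the move JSON in the assistant's reply.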
+ const validatedResponse = PerplexityResponseSchema.parse(response.data);
+ const content = validatedResponse.choices[0].message.content;
+ const parsedContent = JSON.parse(content);
+ const gameResponse = GameResponseSchema.parse(parsedContent);
+
+ return {
+ boxCoordinates: gameResponse.boxCoordinates,
+ playerCoordinates: gameResponse.playerCoordinates,
+ reasoning: gameResponse.reasoning,
+ };
+ } catch (error) {
+    console.error('Failed to run Perplexity model:', error);
+ throw new Error('Failed to run Perplexity model');
+ }
+};
diff --git a/package.json b/package.json
index 4322f2f..c49ddf5 100644
--- a/package.json
+++ b/package.json
@@ -14,6 +14,7 @@
"lint": "next lint"
},
"dependencies": {
+ "@anthropic-ai/sdk": "^0.29.1",
"@auth/core": "^0.34.2",
"@convex-dev/auth": "^0.0.71",
"@google/generative-ai": "^0.21.0",
@@ -25,6 +26,7 @@
"@radix-ui/react-toast": "^1.2.1",
"@radix-ui/react-toggle": "^1.1.0",
"@radix-ui/react-toggle-group": "^1.1.0",
+ "axios": "^1.7.7",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"convex": "^1.16.0",