refactor pricing tracking
webdevcody committed Nov 5, 2024
1 parent 5a6e3d8 commit 2f7332c
Showing 11 changed files with 156 additions and 121 deletions.
3 changes: 3 additions & 0 deletions app/multiplayer/[multiplayerGameId]/page.tsx
@@ -34,6 +34,9 @@ export default function MultiplayerPage({
return (
<Page>
<PageTitle>Multiplayer</PageTitle>
<div className="mb-4 flex justify-center">
<span>Cost: ${multiplayerGame.cost?.toFixed(2)}</span>
</div>
<div className="flex justify-center">
<Visualizer
controls={false}
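A note on the new cost line: cost is declared optional in convex/schema.ts further down in this diff, so multiplayerGame.cost?.toFixed(2) yields undefined until the first turn records a cost, and the label renders as just "Cost: $". A minimal sketch of a fallback rendering — assuming a $0.00 default is the intended display, which the commit does not state — would be:

<div className="mb-4 flex justify-center">
  {/* Sketch only: default to 0 so the label reads "Cost: $0.00" before any turn completes */}
  <span>Cost: ${(multiplayerGame.cost ?? 0).toFixed(2)}</span>
</div>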
35 changes: 22 additions & 13 deletions convex/multiplayerGames.ts
@@ -82,12 +82,23 @@ export const updateMultiplayerGameBoardState = internalMutation({
multiplayerGameId: v.id("multiplayerGames"),
boardState: v.array(v.array(v.string())),
completedTurns: v.number(),
cost: v.optional(v.number()),
},
handler: async (ctx, args) => {
await ctx.db.patch(args.multiplayerGameId, {
const patch: {
boardState: string[][];
completedTurns: number;
cost?: number;
} = {
boardState: args.boardState,
completedTurns: args.completedTurns,
});
};

if (args.cost !== undefined) {
patch.cost = args.cost;
}

await ctx.db.patch(args.multiplayerGameId, patch);
},
});
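For comparison, the conditional patch above can be written more compactly with an object spread — same behavior, since the cost key is only written when the argument is provided and an omitted argument never clobbers the stored running total. This is a sketch, not part of the commit:

await ctx.db.patch(args.multiplayerGameId, {
  boardState: args.boardState,
  completedTurns: args.completedTurns,
  // Include cost only when it was passed; spreading {} adds nothing.
  ...(args.cost !== undefined ? { cost: args.cost } : {}),
});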

@@ -113,21 +124,16 @@ export const runMultiplayerGameTurn = internalAction({
const map = new ZombieSurvival(multiplayerGame.boardState);

if (turn === "Z") {
map.stepZombies();

const numPlayers = multiplayerGame.playerMap.length;
let zombiesToSpawn = 1;
if (numPlayers === 1) {
zombiesToSpawn = 1;
} else if (numPlayers === 2) {
zombiesToSpawn = 2;
} else if (numPlayers === 3) {
zombiesToSpawn = 2;
} else if (numPlayers === 4) {
zombiesToSpawn = 3;
}
const zombiesToSpawn = Math.min(
Math.floor(Math.random() * numPlayers) + 1,
numPlayers,
);
for (let i = 0; i < zombiesToSpawn; i++) {
map.spawnRandomZombie();
}
map.stepZombies();

await ctx.runMutation(
internal.multiplayerGames.updateMultiplayerGameBoardState,
@@ -177,6 +183,8 @@
turn,
);

console.log("cost", results.cost);

if (results.moveDirection && results.moveDirection !== "STAY") {
const moveDirection = fromDirectionString(results.moveDirection);
const p = map.getPlayer(turn);
@@ -205,6 +213,7 @@
multiplayerGameId,
boardState: map.getState(),
completedTurns: multiplayerGame.completedTurns,
cost: (multiplayerGame.cost ?? 0) + (results.cost ?? 0),
},
);
}
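Two things stand out in the Z-turn hunk above. First, the hard-coded mapping from player count to zombie count is replaced by a random draw: Math.floor(Math.random() * numPlayers) + 1 is already a uniform integer in the range 1..numPlayers, so the surrounding Math.min(..., numPlayers) clamp never changes the result. Second, map.stepZombies() looks to have moved from before the spawn loop to after it, meaning newly spawned zombies take their first step on the same turn they appear. A sketch of the equivalent spawn-count expression:

// Sketch: uniform over 1..numPlayers; the Math.min clamp in the diff is a no-op.
const zombiesToSpawn = Math.floor(Math.random() * numPlayers) + 1;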
1 change: 1 addition & 0 deletions convex/schema.ts
@@ -84,6 +84,7 @@ export default defineSchema({
multiplayerGames: defineTable({
boardState: v.array(v.array(v.string())),
completedTurns: v.number(),
cost: v.optional(v.number()),
playerMap: v.array(
v.object({
modelSlug: v.string(),
26 changes: 6 additions & 20 deletions models/claude-3-5-sonnet.ts
@@ -1,6 +1,7 @@
import { type ModelHandler } from ".";
import { Anthropic } from "@anthropic-ai/sdk";
import { z } from "zod";
import { calculateTotalCost } from "./pricing";

const responseSchema = z.object({
playerCoordinates: z.array(z.number()),
@@ -53,32 +54,17 @@ export const claude35sonnet: ModelHandler = async (
const totalTokensUsed =
completion.usage.input_tokens + completion.usage.output_tokens;

// https://docs.anthropic.com/en/docs/about-claude/models
const getPriceForInputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}

return (3.0 / 1_000_000) * tokenCount;
};

const getPriceForOutputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}

return (15.0 / 1_000_000) * tokenCount;
};

return {
boxCoordinates: response.data.boxCoordinates,
playerCoordinates: response.data.playerCoordinates,
reasoning: response.data.reasoning,
promptTokens: promptTokens,
outputTokens: outputTokens,
totalTokensUsed: totalTokensUsed,
totalRunCost:
getPriceForInputToken(promptTokens) +
getPriceForOutputToken(outputTokens),
totalRunCost: calculateTotalCost(
"claude-3.5-sonnet",
promptTokens,
outputTokens,
),
};
};
30 changes: 6 additions & 24 deletions models/gemini-1.5-pro.ts
@@ -1,5 +1,6 @@
import { type ModelHandler } from ".";
import { GoogleGenerativeAI, SchemaType } from "@google/generative-ai";
import { calculateTotalCost } from "./pricing";

interface GeminiResponse {
boxCoordinates: number[][];
@@ -73,36 +74,17 @@ export const gemini15pro: ModelHandler = async (
const outputTokens = result.response.usageMetadata?.candidatesTokenCount;
const totalTokensUsed = result.response.usageMetadata?.totalTokenCount;

// https://ai.google.dev/pricing#1_5pro
const getPriceForInputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}
if (tokenCount > 128_000) {
return (2.5 / 1_000_000) * tokenCount;
}
return (1.25 / 1_000_000) * tokenCount;
};

const getPriceForOutputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}
if (tokenCount > 128_000) {
return (10.0 / 1_000_000) * tokenCount;
}
return (5.0 / 1_000_000) * tokenCount;
};

return {
boxCoordinates: parsedResponse.boxCoordinates,
playerCoordinates: parsedResponse.playerCoordinates,
reasoning: parsedResponse.reasoning,
promptTokens: promptTokens,
outputTokens: outputTokens,
totalTokensUsed: totalTokensUsed,
totalRunCost:
getPriceForInputToken(promptTokens) +
getPriceForOutputToken(outputTokens),
totalRunCost: calculateTotalCost(
"gemini-1.5-pro",
promptTokens,
outputTokens,
),
};
};
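To make the removed Gemini rates concrete, a quick worked example with illustrative token counts (both below the 128,000-token tier boundary); presumably the same arithmetic calculateTotalCost("gemini-1.5-pro", …) now performs inside the shared pricing module:

// 1,000 prompt tokens and 500 output tokens at the pre-refactor rates
const inputCost = (1.25 / 1_000_000) * 1_000; // $0.00125
const outputCost = (5.0 / 1_000_000) * 500;   // $0.00250
const totalRunCost = inputCost + outputCost;  // $0.00375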
26 changes: 6 additions & 20 deletions models/gpt-4o.ts
@@ -2,6 +2,11 @@ import { type ModelHandler } from ".";
import OpenAI from "openai";
import { zodResponseFormat } from "openai/helpers/zod";
import { z } from "zod";
import {
calculateTotalCost,
getPriceForInputToken,
getPriceForOutputToken,
} from "./pricing";

const responseSchema = z.object({
reasoning: z.string(),
@@ -42,32 +47,13 @@ export const gpt4o: ModelHandler = async (systemPrompt, userPrompt, config) => {
const outputTokens = completion.usage?.completion_tokens;
const totalTokensUsed = completion.usage?.total_tokens;

// https://openai.com/api/pricing/
const getPriceForInputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}

return (2.5 / 1_000_000) * tokenCount;
};

const getPriceForOutputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}

return (10.0 / 1_000_000) * tokenCount;
};

return {
boxCoordinates: response.parsed.boxCoordinates,
playerCoordinates: response.parsed.playerCoordinates,
reasoning: response.parsed.reasoning,
promptTokens: promptTokens,
outputTokens: outputTokens,
totalTokensUsed: totalTokensUsed,
totalRunCost:
getPriceForInputToken(promptTokens) +
getPriceForOutputToken(outputTokens),
totalRunCost: calculateTotalCost("gpt-4o", promptTokens, outputTokens),
};
};
26 changes: 6 additions & 20 deletions models/mistral-large-2.ts
@@ -2,6 +2,7 @@ import { type ModelHandler } from ".";
import { isJSON } from "../lib/utils";
import { Mistral } from "@mistralai/mistralai";
import { z } from "zod";
import { calculateTotalCost } from "./pricing";

const responseSchema = z.object({
reasoning: z.string(),
@@ -49,31 +50,16 @@ export const mistralLarge2: ModelHandler = async (
const outputTokens = completion.usage.completionTokens;
const totalTokensUsed = completion.usage.totalTokens;

// https://mistral.ai/technology/
const getPriceForInputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}

return (2.0 / 1_000_000) * tokenCount;
};

const getPriceForOutputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}

return (6.0 / 1_000_000) * tokenCount;
};

const response = await responseSchema.safeParseAsync({
...JSON.parse(content),
promptTokens: completion.usage.promptTokens,
outputTokens: completion.usage.completionTokens,
totalTokensUsed: completion.usage.totalTokens,
totalRunCost:
getPriceForInputToken(promptTokens) +
getPriceForOutputToken(outputTokens),
totalRunCost: calculateTotalCost(
"mistral-large-2",
promptTokens,
outputTokens,
),
});

if (!response.success) {
5 changes: 5 additions & 0 deletions models/multiplayer/gpt-4o.ts
@@ -1,4 +1,5 @@
import { type MultiplayerModelHandler } from ".";
import { calculateTotalCost } from "../pricing";
import OpenAI from "openai";
import { zodResponseFormat } from "openai/helpers/zod";
import { z } from "zod";
@@ -35,6 +36,9 @@ export const gpt4o: MultiplayerModelHandler = async (

const response = completion.choices[0].message;

const promptTokens = completion.usage?.prompt_tokens;
const outputTokens = completion.usage?.completion_tokens;

if (response.refusal) {
throw new Error(`Refusal: ${response.refusal}`);
} else if (!response.parsed) {
@@ -44,5 +48,6 @@
return {
moveDirection: response.parsed.moveDirection,
zombieToShoot: response.parsed.zombieToShoot,
cost: calculateTotalCost("gpt-4o", promptTokens, outputTokens),
};
};
6 changes: 5 additions & 1 deletion models/multiplayer/index.ts
@@ -28,16 +28,17 @@ The 2d Grid is made up of characters, where each character has a meaning.
- Zombies can't move through rocks.
- Zombies can't move through each other.
- Zombies always try to move towards the playing using BFS algorithm.
- Zombies will spawn near the edges of the map
# Player Rules
- Players can move horizontally or vertically.
- Players can't move into occupied spaces or outside the grid.
- Players can throw one popsickle at a zombie each turn.
- Players should move away from zombies.
- Players should probably shoot at the closest zombie
- Stay away from the edges of the map because zombies spawn there.
# Output Format
- Respond only with valid JSON. Do not write an introduction or summary.
- Assume a position on the 2d grid is always represented as [ROW, COL].
- Your output should be a JSON object with the following format:
@@ -61,6 +62,7 @@ export type MultiplayerModelHandler = (
) => Promise<{
moveDirection: string;
zombieToShoot: number[];
cost: number;
}>;

const MAX_RETRIES = 1;
@@ -76,6 +78,7 @@ export type RunModelResult = {
moveDirection?: string;
zombieToShoot?: number[];
reasoning?: string;
cost?: number;
};

export async function runMultiplayerModel(
@@ -114,6 +117,7 @@ export async function runMultiplayerModel(
return {
moveDirection: result.moveDirection,
zombieToShoot: result.zombieToShoot,
cost: result.cost,
};
} catch (error) {
if (retry === MAX_RETRIES || reasoning === null) {
29 changes: 6 additions & 23 deletions models/perplexity-llama-3.1.ts
@@ -1,6 +1,7 @@
import { isJSON } from "../lib/utils";
import { z } from "zod";
import { ModelHandler } from "./index";
import { calculateTotalCost } from "./pricing";

const completionSchema = z.object({
id: z.string(),
@@ -94,29 +95,11 @@ export const perplexityLlama31: ModelHandler = async (
throw new Error("JSON returned by perplexity is malformed");
}

// https://docs.perplexity.ai/guides/pricing#perplexity-sonar-models
const getPriceForInputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}

return (1.0 / 1_000_000) * tokenCount;
};

const getPriceForOutputToken = (tokenCount?: number) => {
if (!tokenCount) {
return 0;
}

return (1.0 / 1_000_000) * tokenCount;
};

const priceForRequest = 5 / 1_000;

const totalRunCost =
getPriceForInputToken(promptTokens) +
getPriceForOutputToken(outputTokens) +
priceForRequest;
const totalRunCost = calculateTotalCost(
"perplexity-llama-3.1",
promptTokens,
outputTokens,
);

const parsedContent = JSON.parse(jsonContent);
const response = await responseSchema.safeParseAsync({
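Only ten of the eleven changed files are shown above; the missing one is the new shared pricing module that every model handler now imports ("./pricing" from the models directory, "../pricing" from models/multiplayer — i.e. models/pricing.ts). Below is a minimal sketch of what it might contain, reconstructed from the per-model helpers deleted in this commit. The exported names match the imports added above, but the table name, the helper signatures, and the overall shape of the real file are assumptions:

// models/pricing.ts — hypothetical sketch; the actual file is not shown in this diff.
// Rates are USD per token, copied from the helpers removed from each model file.
type ModelPricing = {
  inputTokenPrice: (tokenCount: number) => number;
  outputTokenPrice: (tokenCount: number) => number;
  perRequestPrice?: number;
};

const PRICING: Record<string, ModelPricing> = {
  // https://docs.anthropic.com/en/docs/about-claude/models
  "claude-3.5-sonnet": {
    inputTokenPrice: (t) => (3.0 / 1_000_000) * t,
    outputTokenPrice: (t) => (15.0 / 1_000_000) * t,
  },
  // https://ai.google.dev/pricing#1_5pro — higher tier above 128k tokens
  "gemini-1.5-pro": {
    inputTokenPrice: (t) => ((t > 128_000 ? 2.5 : 1.25) / 1_000_000) * t,
    outputTokenPrice: (t) => ((t > 128_000 ? 10.0 : 5.0) / 1_000_000) * t,
  },
  // https://openai.com/api/pricing/
  "gpt-4o": {
    inputTokenPrice: (t) => (2.5 / 1_000_000) * t,
    outputTokenPrice: (t) => (10.0 / 1_000_000) * t,
  },
  // https://mistral.ai/technology/
  "mistral-large-2": {
    inputTokenPrice: (t) => (2.0 / 1_000_000) * t,
    outputTokenPrice: (t) => (6.0 / 1_000_000) * t,
  },
  // https://docs.perplexity.ai/guides/pricing#perplexity-sonar-models
  // The removed Perplexity code also charged a flat $5 per 1,000 requests;
  // whether the new module keeps that fee is not visible in the diff.
  "perplexity-llama-3.1": {
    inputTokenPrice: (t) => (1.0 / 1_000_000) * t,
    outputTokenPrice: (t) => (1.0 / 1_000_000) * t,
    perRequestPrice: 5 / 1_000,
  },
};

// gpt-4o.ts also imports these two helpers, so they are presumably exported;
// their exact signatures here are an assumption.
export function getPriceForInputToken(model: string, tokenCount?: number): number {
  if (!tokenCount) return 0;
  return PRICING[model].inputTokenPrice(tokenCount);
}

export function getPriceForOutputToken(model: string, tokenCount?: number): number {
  if (!tokenCount) return 0;
  return PRICING[model].outputTokenPrice(tokenCount);
}

export function calculateTotalCost(
  model: string,
  promptTokens?: number,
  outputTokens?: number,
): number {
  return (
    getPriceForInputToken(model, promptTokens) +
    getPriceForOutputToken(model, outputTokens) +
    (PRICING[model].perRequestPrice ?? 0)
  );
}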
