fix(js): fix streaming json mode in gemini plugins (#1003)
cabljac authored Oct 3, 2024
1 parent (231591b), commit aeb91b0
Showing 3 changed files with 74 additions and 7 deletions.
js/plugins/googleai/src/gemini.ts (2 additions, 1 deletion)
@@ -14,6 +14,7 @@
 * limitations under the License.
 */

+import { extractJson } from '@genkit-ai/ai/extract';
import {
  CandidateData,
  defineModel,
@@ -388,7 +389,7 @@ function toGeminiPart(part: Part): GeminiPart {

function fromGeminiPart(part: GeminiPart, jsonMode: boolean): Part {
  if (jsonMode && part.text !== undefined) {
-    return { data: JSON.parse(part.text) };
+    return { data: extractJson(part.text) };
  }
  if (part.text !== undefined) return { text: part.text };
  if (part.inlineData) return fromInlineData(part);
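Context for the change: in JSON output mode the model's text arrives in incremental chunks, so an intermediate chunk is usually not a complete JSON document (and is often wrapped in a markdown fence), which makes JSON.parse throw mid-stream. The snippet below is an editorial sketch, not part of the commit; it assumes extractJson from '@genkit-ai/ai/extract' tolerates fenced and partially complete JSON and returns whatever it can recover instead of throwing.

// Sketch of the failure mode this commit fixes. Assumes extractJson accepts
// fenced/partial JSON and returns a best-effort value rather than throwing.
import { extractJson } from '@genkit-ai/ai/extract';

// A typical intermediate streaming chunk: fenced and cut off mid-object.
const partialChunk = '```json\n[{"name": "Ada", "age": 36}, {"name": "Lin"';

try {
  // JSON.parse only accepts a complete JSON document, so streaming breaks here.
  JSON.parse(partialChunk);
} catch (e) {
  console.log('JSON.parse throws on partial chunks:', (e as Error).message);
}

// A lenient extractor can still surface whatever has been parsed so far.
console.log('extractJson recovers:', extractJson(partialChunk));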
js/plugins/vertexai/src/gemini.ts (2 additions, 1 deletion)
@@ -14,6 +14,7 @@
 * limitations under the License.
 */

+import { extractJson } from '@genkit-ai/ai/extract';
import {
  CandidateData,
  defineModel,
@@ -401,7 +402,7 @@ function fromGeminiFunctionResponsePart(part: GeminiPart): Part {
// Converts vertex part to genkit part
function fromGeminiPart(part: GeminiPart, jsonMode: boolean): Part {
  if (jsonMode && part.text !== undefined) {
-    return { data: JSON.parse(part.text) };
+    return { data: extractJson(part.text) };
  }
  if (part.text !== undefined) return { text: part.text };
  if (part.functionCall) return fromGeminiFunctionCallPart(part);
js/testapps/basic-gemini/src/index.ts (70 additions, 5 deletions)
@@ -34,10 +34,8 @@ import {

const provider = process.env.PROVIDER || 'vertexai';

-const plugin = provider === 'vertexai' ? vertexAI : googleAI;

configureGenkit({
-  plugins: [plugin()],
+  plugins: [vertexAI(), googleAI()],
  // Log debug output to tbe console.
  logLevel: 'debug',
  // Perform OpenTelemetry instrumentation and enable trace collection.
@@ -61,10 +59,10 @@ const jokeSubjectGenerator = defineTool(
export const jokeFlow = defineFlow(
  {
    name: 'jokeFlow',
-    inputSchema: z.void(),
+    inputSchema: z.object({ provider: z.enum(['vertexai', 'googleai']) }),
    outputSchema: z.any(),
  },
-  async () => {
+  async ({ provider }) => {
    // Construct a request and send it to the model API.
    if (provider === 'vertexai') {
      const llmResponse = await generate({
@@ -100,6 +98,73 @@ export const jokeFlow = defineFlow(
  }
);

+export const streamingFlow = defineFlow(
+  {
+    name: 'streamingFlow',
+    inputSchema: z.object({ provider: z.enum(['vertexai', 'googleai']) }),
+    outputSchema: z.any(),
+  },
+  async ({ provider }) => {
+    let count = 0;
+
+    if (provider === 'vertexai') {
+      // Construct a request and send it to the model API.
+      const llmResponse = await generate({
+        model: gemini15FlashVertexAi,
+        config: {
+          temperature: 2,
+        },
+        output: {
+          schema: z.array(
+            z.object({
+              name: z.string(),
+              age: z.number(),
+              description: z.string(),
+              personal_statement: z.string(),
+            })
+          ),
+        },
+        tools: [jokeSubjectGenerator],
+        prompt: `come up with some test user data. 10 users long`,
+        streamingCallback: (chunk) => {
+          count++;
+          const output = chunk.text();
+          console.log(`chunk ${count}`, output);
+          return output;
+        },
+      });
+
+      return llmResponse.output()!;
+    } else {
+      const llmResponse = await generate({
+        model: gemini15FlashGoogleAi,
+        config: {
+          temperature: 2,
+        },
+        output: {
+          schema: z.array(
+            z.object({
+              name: z.string(),
+              age: z.number(),
+              description: z.string(),
+              personal_statement: z.string(),
+            })
+          ),
+        },
+        tools: [jokeSubjectGenerator],
+        prompt: `come up with some test user data. 10 users long`,
+        streamingCallback: (chunk) => {
+          count++;
+          const output = chunk.text();
+          console.log(`chunk ${count}`, output);
+          return output;
+        },
+      });
+      return llmResponse.output()!;
+    }
+  }
+);

// Start a flow server, which exposes your flows as HTTP endpoints. This call
// must come last, after all of your plug-in configuration and flow definitions.
// You can optionally specify a subset of flows to serve, and configure some
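With this change the provider moves from the PROVIDER environment variable into the flow input, so each call selects a backend explicitly. A minimal invocation sketch (editorial, not part of the commit), assuming '@genkit-ai/flow' exports a runFlow(flow, input) helper at this version and that the code sits in the same module as the flow definitions above:

// Sketch only; assumes runFlow(flow, input) is exported by '@genkit-ai/flow'
// and that jokeFlow/streamingFlow are in scope (same module as above).
import { runFlow } from '@genkit-ai/flow';

async function demo() {
  // Previously the flows took no input (z.void()) and the provider came from
  // the environment; now it is chosen per call.
  const joke = await runFlow(jokeFlow, { provider: 'googleai' });
  console.log('joke:', joke);

  const users = await runFlow(streamingFlow, { provider: 'vertexai' });
  console.log('users:', users);
}

demo().catch(console.error);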

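The streamingCallback in streamingFlow logs each raw chunk; a consumer that wants structured data before the stream completes has to parse partial JSON, which is exactly where JSON.parse used to throw. A hypothetical consumer-side sketch (the buffer and the TestUser type are illustrative, not from the commit), again assuming extractJson returns a best-effort value for incomplete input:

// Hypothetical consumer of streamed JSON output; not part of the commit.
import { extractJson } from '@genkit-ai/ai/extract';

type TestUser = {
  name: string;
  age: number;
  description: string;
  personal_statement: string;
};

let buffer = '';

// Feed each chunk's text (e.g. what streamingCallback receives) into the
// buffer and re-extract; complete users become available incrementally.
function onChunkText(text: string): TestUser[] | null {
  buffer += text;
  return extractJson(buffer) as TestUser[] | null;
}

// Two partial chunks arriving over time:
console.log(onChunkText('[{"name":"Ada","age":36,"description":"d",'));
console.log(onChunkText('"personal_statement":"p"}]'));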