Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
133 changes: 95 additions & 38 deletions docs/static/llama-stack-spec.html
Original file line number Diff line number Diff line change
Expand Up @@ -9195,6 +9195,51 @@
"title": "OpenAIResponseOutputMessageWebSearchToolCall",
"description": "Web search tool call output message for OpenAI responses."
},
"OpenAIResponsePromptParam": {
"type": "object",
"properties": {
"id": {
"type": "string",
"description": "Unique identifier of the prompt template"
},
"version": {
"type": "string",
"description": "Version number of the prompt to use (defaults to latest if not specified)"
},
"variables": {
"type": "object",
"additionalProperties": {
"oneOf": [
{
"type": "null"
},
{
"type": "boolean"
},
{
"type": "number"
},
{
"type": "string"
},
{
"type": "array"
},
{
"type": "object"
}
]
},
"description": "Dictionary of variable names to values for template substitution"
}
},
"additionalProperties": false,
"required": [
"id"
],
"title": "OpenAIResponsePromptParam",
"description": "Prompt object that is used for OpenAI responses."
},
"OpenAIResponseText": {
"type": "object",
"properties": {
Expand Down Expand Up @@ -9289,6 +9334,10 @@
"type": "string",
"description": "The underlying LLM used for completions."
},
"prompt": {
"$ref": "#/components/schemas/OpenAIResponsePromptParam",
"description": "Prompt object with ID, version, and variables."
},
"instructions": {
"type": "string"
},
Expand Down Expand Up @@ -9393,6 +9442,10 @@
"type": "string",
"description": "(Optional) ID of the previous response in a conversation"
},
"prompt": {
"$ref": "#/components/schemas/Prompt",
"description": "(Optional) Prompt object with ID, version, and variables"
},
"status": {
"type": "string",
"description": "Current status of the response generation"
Expand Down Expand Up @@ -9584,6 +9637,44 @@
"title": "OpenAIResponseOutputMessageMCPListTools",
"description": "MCP list tools output message containing available tools from an MCP server."
},
"Prompt": {
"type": "object",
"properties": {
"prompt": {
"type": "string",
"description": "The system prompt text with variable placeholders. Variables are only supported when using the Responses API."
},
"version": {
"type": "integer",
"description": "Version (integer starting at 1, incremented on save)"
},
"prompt_id": {
"type": "string",
"description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'"
},
"variables": {
"type": "array",
"items": {
"type": "string"
},
"description": "List of prompt variable names that can be used in the prompt template"
},
"is_default": {
"type": "boolean",
"default": false,
"description": "Boolean indicating whether this version is the default version for this prompt"
}
},
"additionalProperties": false,
"required": [
"version",
"prompt_id",
"variables",
"is_default"
],
"title": "Prompt",
"description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
},
"OpenAIResponseContentPartOutputText": {
"type": "object",
"properties": {
Expand Down Expand Up @@ -10460,44 +10551,6 @@
],
"title": "CreatePromptRequest"
},
"Prompt": {
"type": "object",
"properties": {
"prompt": {
"type": "string",
"description": "The system prompt text with variable placeholders. Variables are only supported when using the Responses API."
},
"version": {
"type": "integer",
"description": "Version (integer starting at 1, incremented on save)"
},
"prompt_id": {
"type": "string",
"description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'"
},
"variables": {
"type": "array",
"items": {
"type": "string"
},
"description": "List of prompt variable names that can be used in the prompt template"
},
"is_default": {
"type": "boolean",
"default": false,
"description": "Boolean indicating whether this version is the default version for this prompt"
}
},
"additionalProperties": false,
"required": [
"version",
"prompt_id",
"variables",
"is_default"
],
"title": "Prompt",
"description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
},
"OpenAIDeleteResponseObject": {
"type": "object",
"properties": {
Expand Down Expand Up @@ -13614,6 +13667,10 @@
"type": "string",
"description": "(Optional) ID of the previous response in a conversation"
},
"prompt": {
"$ref": "#/components/schemas/Prompt",
"description": "(Optional) Prompt object with ID, version, and variables"
},
"status": {
"type": "string",
"description": "Current status of the response generation"
Expand Down
116 changes: 78 additions & 38 deletions docs/static/llama-stack-spec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6679,6 +6679,34 @@ components:
OpenAIResponseOutputMessageWebSearchToolCall
description: >-
Web search tool call output message for OpenAI responses.
OpenAIResponsePromptParam:
type: object
properties:
id:
type: string
description: Unique identifier of the prompt template
version:
type: string
description: >-
Version number of the prompt to use (defaults to latest if not specified)
variables:
type: object
additionalProperties:
oneOf:
- type: 'null'
- type: boolean
- type: number
- type: string
- type: array
- type: object
description: >-
Dictionary of variable names to values for template substitution
additionalProperties: false
required:
- id
title: OpenAIResponsePromptParam
description: >-
        Prompt object used for OpenAI responses.
OpenAIResponseText:
type: object
properties:
Expand Down Expand Up @@ -6744,6 +6772,10 @@ components:
model:
type: string
description: The underlying LLM used for completions.
prompt:
$ref: '#/components/schemas/OpenAIResponsePromptParam'
description: >-
            (Optional) Prompt object with ID, version, and variables.
instructions:
type: string
previous_response_id:
Expand Down Expand Up @@ -6833,6 +6865,10 @@ components:
type: string
description: >-
(Optional) ID of the previous response in a conversation
prompt:
$ref: '#/components/schemas/Prompt'
description: >-
(Optional) Prompt object with ID, version, and variables
status:
type: string
description: >-
Expand Down Expand Up @@ -6983,6 +7019,44 @@ components:
title: OpenAIResponseOutputMessageMCPListTools
description: >-
MCP list tools output message containing available tools from an MCP server.
Prompt:
type: object
properties:
prompt:
type: string
description: >-
The system prompt text with variable placeholders. Variables are only
supported when using the Responses API.
version:
type: integer
description: >-
Version (integer starting at 1, incremented on save)
prompt_id:
type: string
description: >-
Unique identifier formatted as 'pmpt_<48-digit-hash>'
variables:
type: array
items:
type: string
description: >-
List of prompt variable names that can be used in the prompt template
is_default:
type: boolean
default: false
description: >-
Boolean indicating whether this version is the default version for this
prompt
additionalProperties: false
required:
- version
- prompt_id
- variables
- is_default
title: Prompt
description: >-
A prompt resource representing a stored OpenAI Compatible prompt template
in Llama Stack.
OpenAIResponseContentPartOutputText:
type: object
properties:
Expand Down Expand Up @@ -7697,44 +7771,6 @@ components:
required:
- prompt
title: CreatePromptRequest
Prompt:
type: object
properties:
prompt:
type: string
description: >-
The system prompt text with variable placeholders. Variables are only
supported when using the Responses API.
version:
type: integer
description: >-
Version (integer starting at 1, incremented on save)
prompt_id:
type: string
description: >-
Unique identifier formatted as 'pmpt_<48-digit-hash>'
variables:
type: array
items:
type: string
description: >-
List of prompt variable names that can be used in the prompt template
is_default:
type: boolean
default: false
description: >-
Boolean indicating whether this version is the default version for this
prompt
additionalProperties: false
required:
- version
- prompt_id
- variables
- is_default
title: Prompt
description: >-
A prompt resource representing a stored OpenAI Compatible prompt template
in Llama Stack.
OpenAIDeleteResponseObject:
type: object
properties:
Expand Down Expand Up @@ -10069,6 +10105,10 @@ components:
type: string
description: >-
(Optional) ID of the previous response in a conversation
prompt:
$ref: '#/components/schemas/Prompt'
description: >-
(Optional) Prompt object with ID, version, and variables
status:
type: string
description: >-
Expand Down
4 changes: 3 additions & 1 deletion llama_stack/apis/agents/agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
OpenAIResponseInputTool,
OpenAIResponseObject,
OpenAIResponseObjectStream,
OpenAIResponsePromptParam,
OpenAIResponseText,
)

Expand Down Expand Up @@ -711,6 +712,7 @@ async def create_openai_response(
self,
input: str | list[OpenAIResponseInput],
model: str,
prompt: OpenAIResponsePromptParam | None = None,
instructions: str | None = None,
previous_response_id: str | None = None,
store: bool | None = True,
Expand All @@ -722,9 +724,9 @@ async def create_openai_response(
max_infer_iters: int | None = 10, # this is an extension to the OpenAI API
) -> OpenAIResponseObject | AsyncIterator[OpenAIResponseObjectStream]:
"""Create a new OpenAI response.

:param input: Input message(s) to create the response.
:param model: The underlying LLM used for completions.
        :param prompt: (Optional) Prompt object with ID, version, and variables.
:param previous_response_id: (Optional) if specified, the new response will be a continuation of the previous response. This can be used to easily fork-off new responses from existing responses.
:param include: (Optional) Additional fields to include in the response.
:returns: An OpenAIResponseObject.
Expand Down
Loading
Loading