
Commit d76b15b

feat: add Prompts API to Responses API
1 parent 8dc9fd6 commit d76b15b

File tree

11 files changed: +332, -80 lines changed


docs/static/llama-stack-spec.html

Lines changed: 95 additions & 38 deletions
@@ -9283,6 +9283,51 @@
         "title": "OpenAIResponseOutputMessageWebSearchToolCall",
         "description": "Web search tool call output message for OpenAI responses."
     },
+    "OpenAIResponsePromptParam": {
+        "type": "object",
+        "properties": {
+            "id": {
+                "type": "string",
+                "description": "Unique identifier of the prompt template"
+            },
+            "version": {
+                "type": "string",
+                "description": "Version number of the prompt to use (defaults to latest if not specified)"
+            },
+            "variables": {
+                "type": "object",
+                "additionalProperties": {
+                    "oneOf": [
+                        {
+                            "type": "null"
+                        },
+                        {
+                            "type": "boolean"
+                        },
+                        {
+                            "type": "number"
+                        },
+                        {
+                            "type": "string"
+                        },
+                        {
+                            "type": "array"
+                        },
+                        {
+                            "type": "object"
+                        }
+                    ]
+                },
+                "description": "Dictionary of variable names to values for template substitution"
+            }
+        },
+        "additionalProperties": false,
+        "required": [
+            "id"
+        ],
+        "title": "OpenAIResponsePromptParam",
+        "description": "Prompt object that is used for OpenAI responses."
+    },
     "OpenAIResponseText": {
         "type": "object",
         "properties": {
@@ -9377,6 +9422,10 @@
             "type": "string",
             "description": "The underlying LLM used for completions."
         },
+        "prompt": {
+            "$ref": "#/components/schemas/OpenAIResponsePromptParam",
+            "description": "Prompt object with ID, version, and variables."
+        },
         "instructions": {
             "type": "string"
         },
@@ -9481,6 +9530,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
         },
+        "prompt": {
+            "$ref": "#/components/schemas/Prompt",
+            "description": "(Optional) Prompt object with ID, version, and variables"
+        },
         "status": {
             "type": "string",
             "description": "Current status of the response generation"
@@ -9676,6 +9729,44 @@
         "title": "OpenAIResponseOutputMessageMCPListTools",
         "description": "MCP list tools output message containing available tools from an MCP server."
     },
+    "Prompt": {
+        "type": "object",
+        "properties": {
+            "prompt": {
+                "type": "string",
+                "description": "The system prompt text with variable placeholders. Variables are only supported when using the Responses API."
+            },
+            "version": {
+                "type": "integer",
+                "description": "Version (integer starting at 1, incremented on save)"
+            },
+            "prompt_id": {
+                "type": "string",
+                "description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'"
+            },
+            "variables": {
+                "type": "array",
+                "items": {
+                    "type": "string"
+                },
+                "description": "List of prompt variable names that can be used in the prompt template"
+            },
+            "is_default": {
+                "type": "boolean",
+                "default": false,
+                "description": "Boolean indicating whether this version is the default version for this prompt"
+            }
+        },
+        "additionalProperties": false,
+        "required": [
+            "version",
+            "prompt_id",
+            "variables",
+            "is_default"
+        ],
+        "title": "Prompt",
+        "description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
+    },
     "OpenAIResponseContentPart": {
         "oneOf": [
             {
@@ -10481,44 +10572,6 @@
         ],
         "title": "CreatePromptRequest"
     },
-    "Prompt": {
-        "type": "object",
-        "properties": {
-            "prompt": {
-                "type": "string",
-                "description": "The system prompt text with variable placeholders. Variables are only supported when using the Responses API."
-            },
-            "version": {
-                "type": "integer",
-                "description": "Version (integer starting at 1, incremented on save)"
-            },
-            "prompt_id": {
-                "type": "string",
-                "description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'"
-            },
-            "variables": {
-                "type": "array",
-                "items": {
-                    "type": "string"
-                },
-                "description": "List of prompt variable names that can be used in the prompt template"
-            },
-            "is_default": {
-                "type": "boolean",
-                "default": false,
-                "description": "Boolean indicating whether this version is the default version for this prompt"
-            }
-        },
-        "additionalProperties": false,
-        "required": [
-            "version",
-            "prompt_id",
-            "variables",
-            "is_default"
-        ],
-        "title": "Prompt",
-        "description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
-    },
     "OpenAIDeleteResponseObject": {
         "type": "object",
         "properties": {
@@ -13692,6 +13745,10 @@
             "type": "string",
             "description": "(Optional) ID of the previous response in a conversation"
         },
+        "prompt": {
+            "$ref": "#/components/schemas/Prompt",
+            "description": "(Optional) Prompt object with ID, version, and variables"
+        },
         "status": {
             "type": "string",
             "description": "Current status of the response generation"

docs/static/llama-stack-spec.yaml

Lines changed: 78 additions & 38 deletions
@@ -6745,6 +6745,34 @@ components:
         OpenAIResponseOutputMessageWebSearchToolCall
       description: >-
         Web search tool call output message for OpenAI responses.
+    OpenAIResponsePromptParam:
+      type: object
+      properties:
+        id:
+          type: string
+          description: Unique identifier of the prompt template
+        version:
+          type: string
+          description: >-
+            Version number of the prompt to use (defaults to latest if not specified)
+        variables:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+          description: >-
+            Dictionary of variable names to values for template substitution
+      additionalProperties: false
+      required:
+        - id
+      title: OpenAIResponsePromptParam
+      description: >-
+        Prompt object that is used for OpenAI responses.
     OpenAIResponseText:
       type: object
       properties:
@@ -6810,6 +6838,10 @@ components:
         model:
           type: string
           description: The underlying LLM used for completions.
+        prompt:
+          $ref: '#/components/schemas/OpenAIResponsePromptParam'
+          description: >-
+            Prompt object with ID, version, and variables.
         instructions:
           type: string
         previous_response_id:
@@ -6899,6 +6931,10 @@ components:
          type: string
          description: >-
            (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-
@@ -7053,6 +7089,44 @@ components:
       title: OpenAIResponseOutputMessageMCPListTools
       description: >-
         MCP list tools output message containing available tools from an MCP server.
+    Prompt:
+      type: object
+      properties:
+        prompt:
+          type: string
+          description: >-
+            The system prompt text with variable placeholders. Variables are only
+            supported when using the Responses API.
+        version:
+          type: integer
+          description: >-
+            Version (integer starting at 1, incremented on save)
+        prompt_id:
+          type: string
+          description: >-
+            Unique identifier formatted as 'pmpt_<48-digit-hash>'
+        variables:
+          type: array
+          items:
+            type: string
+          description: >-
+            List of prompt variable names that can be used in the prompt template
+        is_default:
+          type: boolean
+          default: false
+          description: >-
+            Boolean indicating whether this version is the default version for this
+            prompt
+      additionalProperties: false
+      required:
+        - version
+        - prompt_id
+        - variables
+        - is_default
+      title: Prompt
+      description: >-
+        A prompt resource representing a stored OpenAI Compatible prompt template
+        in Llama Stack.
     OpenAIResponseContentPart:
       oneOf:
         - $ref: '#/components/schemas/OpenAIResponseContentPartOutputText'
@@ -7732,44 +7806,6 @@ components:
       required:
         - prompt
       title: CreatePromptRequest
-    Prompt:
-      type: object
-      properties:
-        prompt:
-          type: string
-          description: >-
-            The system prompt text with variable placeholders. Variables are only
-            supported when using the Responses API.
-        version:
-          type: integer
-          description: >-
-            Version (integer starting at 1, incremented on save)
-        prompt_id:
-          type: string
-          description: >-
-            Unique identifier formatted as 'pmpt_<48-digit-hash>'
-        variables:
-          type: array
-          items:
-            type: string
-          description: >-
-            List of prompt variable names that can be used in the prompt template
-        is_default:
-          type: boolean
-          default: false
-          description: >-
-            Boolean indicating whether this version is the default version for this
-            prompt
-      additionalProperties: false
-      required:
-        - version
-        - prompt_id
-        - variables
-        - is_default
-      title: Prompt
-      description: >-
-        A prompt resource representing a stored OpenAI Compatible prompt template
-        in Llama Stack.
     OpenAIDeleteResponseObject:
       type: object
       properties:
@@ -10163,6 +10199,10 @@ components:
          type: string
          description: >-
            (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-
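
As a complement, a sketch of the stored Prompt resource that the response object can now reference (the new "prompt" field on the response schema points at the Prompt schema above). The field names and types follow the schema; the template text and the "{{name}}" placeholder syntax are assumptions for illustration.

# Sketch of a stored Prompt resource as described by the Prompt schema above.
# The template text and placeholder syntax are assumed for illustration.
stored_prompt = {
    "prompt": "Summarize {{document}} for a {{audience}} audience.",  # template text (placeholder syntax assumed)
    "prompt_id": "pmpt_<48-digit-hash>",    # unique identifier, 'pmpt_' followed by a 48-digit hash
    "version": 1,                           # integer version, starting at 1 and incremented on save
    "variables": ["document", "audience"],  # variable names usable in the template
    "is_default": False,                    # whether this version is the default for the prompt
}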

llama_stack/apis/agents/agents.py

Lines changed: 3 additions & 1 deletion
@@ -38,6 +38,7 @@
     OpenAIResponseInputTool,
     OpenAIResponseObject,
     OpenAIResponseObjectStream,
+    OpenAIResponsePromptParam,
     OpenAIResponseText,
 )
 
@@ -711,6 +712,7 @@ async def create_openai_response(
         self,
         input: str | list[OpenAIResponseInput],
         model: str,
+        prompt: OpenAIResponsePromptParam | None = None,
         instructions: str | None = None,
         previous_response_id: str | None = None,
         store: bool | None = True,
@@ -722,9 +724,9 @@
         max_infer_iters: int | None = 10,  # this is an extension to the OpenAI API
     ) -> OpenAIResponseObject | AsyncIterator[OpenAIResponseObjectStream]:
         """Create a new OpenAI response.
-
         :param input: Input message(s) to create the response.
         :param model: The underlying LLM used for completions.
+        :param prompt: Prompt object with ID, version, and variables.
         :param previous_response_id: (Optional) if specified, the new response will be a continuation of the previous response. This can be used to easily fork-off new responses from existing responses.
         :param include: (Optional) Additional fields to include in the response.
         :returns: An OpenAIResponseObject.
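
Finally, a hedged usage sketch of the extended signature. Here "agents_api" stands in for whatever object implements the Agents protocol shown above, OpenAIResponsePromptParam is assumed to be constructible with the id/version/variables fields from the spec, and the concrete model name, prompt ID, and variable values are placeholders.

# Usage sketch for the extended create_openai_response signature (values are placeholders).
response = await agents_api.create_openai_response(
    input="Draft a short status update for the team.",
    model="llama3.2-3b",
    prompt=OpenAIResponsePromptParam(
        id="pmpt_<48-digit-hash>",
        version="3",
        variables={"team": "platform", "sprint": 42},
    ),
)
print(response.status)  # current status of the response generation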
