Commit fadf1d0

feat: add Prompts API to Responses API
1 parent 6cce553 commit fadf1d0

11 files changed: 358 additions & 80 deletions

docs/static/llama-stack-spec.html

Lines changed: 112 additions & 38 deletions
@@ -9195,6 +9195,51 @@
       "title": "OpenAIResponseOutputMessageWebSearchToolCall",
       "description": "Web search tool call output message for OpenAI responses."
     },
+    "OpenAIResponsePromptParam": {
+      "type": "object",
+      "properties": {
+        "id": {
+          "type": "string",
+          "description": "Unique identifier of the prompt template"
+        },
+        "version": {
+          "type": "string",
+          "description": "Version number of the prompt to use (defaults to latest if not specified)"
+        },
+        "variables": {
+          "type": "object",
+          "additionalProperties": {
+            "oneOf": [
+              {
+                "type": "null"
+              },
+              {
+                "type": "boolean"
+              },
+              {
+                "type": "number"
+              },
+              {
+                "type": "string"
+              },
+              {
+                "type": "array"
+              },
+              {
+                "type": "object"
+              }
+            ]
+          },
+          "description": "Dictionary of variable names to values for template substitution"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "id"
+      ],
+      "title": "OpenAIResponsePromptParam",
+      "description": "Prompt object that is used for OpenAI responses."
+    },
     "OpenAIResponseText": {
       "type": "object",
       "properties": {
@@ -9289,6 +9334,10 @@
         "type": "string",
         "description": "The underlying LLM used for completions."
       },
+      "prompt": {
+        "$ref": "#/components/schemas/OpenAIResponsePromptParam",
+        "description": "Prompt object with ID, version, and variables."
+      },
       "instructions": {
         "type": "string"
       },
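
With the new field wired into the create-request schema, a request body can reference a stored prompt alongside the usual fields; the model id, input text, and prompt values here are placeholders.

```python
# Illustrative create-response request body using a stored prompt template.
request_body = {
    "model": "llama3.2:3b",                 # placeholder model id
    "input": "Summarize my latest order.",  # input message(s) for the response
    "prompt": {                             # field added by this commit
        "id": "pmpt_123",
        "version": "1",
        "variables": {"customer_name": "Alice"},
    },
}
```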
@@ -9393,6 +9442,10 @@
         "type": "string",
         "description": "(Optional) ID of the previous response in a conversation"
       },
+      "prompt": {
+        "$ref": "#/components/schemas/Prompt",
+        "description": "(Optional) Prompt object with ID, version, and variables"
+      },
       "status": {
         "type": "string",
         "description": "Current status of the response generation"
@@ -9584,6 +9637,61 @@
       "title": "OpenAIResponseOutputMessageMCPListTools",
       "description": "MCP list tools output message containing available tools from an MCP server."
     },
+    "Prompt": {
+      "type": "object",
+      "properties": {
+        "prompt": {
+          "type": "string",
+          "description": "The system prompt text with variable placeholders. Variables are only supported when using the Responses API."
+        },
+        "version": {
+          "type": "integer",
+          "description": "Version (integer starting at 1, incremented on save)"
+        },
+        "prompt_id": {
+          "type": "string",
+          "description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'"
+        },
+        "variables": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of prompt variable names that can be used in the prompt template"
+        },
+        "is_default": {
+          "type": "boolean",
+          "default": false,
+          "description": "Boolean indicating whether this version is the default version for this prompt"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "version",
+        "prompt_id",
+        "variables",
+        "is_default"
+      ],
+      "title": "Prompt",
+      "description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
+    },
+    "OpenAIResponseContentPart": {
+      "oneOf": [
+        {
+          "$ref": "#/components/schemas/OpenAIResponseContentPartOutputText"
+        },
+        {
+          "$ref": "#/components/schemas/OpenAIResponseContentPartRefusal"
+        }
+      ],
+      "discriminator": {
+        "propertyName": "type",
+        "mapping": {
+          "output_text": "#/components/schemas/OpenAIResponseContentPartOutputText",
+          "refusal": "#/components/schemas/OpenAIResponseContentPartRefusal"
+        }
+      }
+    },
     "OpenAIResponseContentPartOutputText": {
       "type": "object",
       "properties": {
@@ -10460,44 +10568,6 @@
       ],
       "title": "CreatePromptRequest"
     },
-    "Prompt": {
-      "type": "object",
-      "properties": {
-        "prompt": {
-          "type": "string",
-          "description": "The system prompt text with variable placeholders. Variables are only supported when using the Responses API."
-        },
-        "version": {
-          "type": "integer",
-          "description": "Version (integer starting at 1, incremented on save)"
-        },
-        "prompt_id": {
-          "type": "string",
-          "description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'"
-        },
-        "variables": {
-          "type": "array",
-          "items": {
-            "type": "string"
-          },
-          "description": "List of prompt variable names that can be used in the prompt template"
-        },
-        "is_default": {
-          "type": "boolean",
-          "default": false,
-          "description": "Boolean indicating whether this version is the default version for this prompt"
-        }
-      },
-      "additionalProperties": false,
-      "required": [
-        "version",
-        "prompt_id",
-        "variables",
-        "is_default"
-      ],
-      "title": "Prompt",
-      "description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
-    },
     "OpenAIDeleteResponseObject": {
       "type": "object",
       "properties": {

@@ -13614,6 +13684,10 @@
         "type": "string",
         "description": "(Optional) ID of the previous response in a conversation"
       },
+      "prompt": {
+        "$ref": "#/components/schemas/Prompt",
+        "description": "(Optional) Prompt object with ID, version, and variables"
+      },
       "status": {
         "type": "string",
         "description": "Current status of the response generation"

docs/static/llama-stack-spec.yaml

Lines changed: 87 additions & 38 deletions
@@ -6679,6 +6679,34 @@ components:
         OpenAIResponseOutputMessageWebSearchToolCall
       description: >-
         Web search tool call output message for OpenAI responses.
+    OpenAIResponsePromptParam:
+      type: object
+      properties:
+        id:
+          type: string
+          description: Unique identifier of the prompt template
+        version:
+          type: string
+          description: >-
+            Version number of the prompt to use (defaults to latest if not specified)
+        variables:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+          description: >-
+            Dictionary of variable names to values for template substitution
+      additionalProperties: false
+      required:
+        - id
+      title: OpenAIResponsePromptParam
+      description: >-
+        Prompt object that is used for OpenAI responses.
     OpenAIResponseText:
       type: object
       properties:

@@ -6744,6 +6772,10 @@
         model:
           type: string
           description: The underlying LLM used for completions.
+        prompt:
+          $ref: '#/components/schemas/OpenAIResponsePromptParam'
+          description: >-
+            Prompt object with ID, version, and variables.
         instructions:
           type: string
         previous_response_id:

@@ -6833,6 +6865,10 @@
           type: string
           description: >-
            (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-

@@ -6983,6 +7019,53 @@
       title: OpenAIResponseOutputMessageMCPListTools
       description: >-
         MCP list tools output message containing available tools from an MCP server.
+    Prompt:
+      type: object
+      properties:
+        prompt:
+          type: string
+          description: >-
+            The system prompt text with variable placeholders. Variables are only
+            supported when using the Responses API.
+        version:
+          type: integer
+          description: >-
+            Version (integer starting at 1, incremented on save)
+        prompt_id:
+          type: string
+          description: >-
+            Unique identifier formatted as 'pmpt_<48-digit-hash>'
+        variables:
+          type: array
+          items:
+            type: string
+          description: >-
+            List of prompt variable names that can be used in the prompt template
+        is_default:
+          type: boolean
+          default: false
+          description: >-
+            Boolean indicating whether this version is the default version for this
+            prompt
+      additionalProperties: false
+      required:
+        - version
+        - prompt_id
+        - variables
+        - is_default
+      title: Prompt
+      description: >-
+        A prompt resource representing a stored OpenAI Compatible prompt template
+        in Llama Stack.
+    OpenAIResponseContentPart:
+      oneOf:
+        - $ref: '#/components/schemas/OpenAIResponseContentPartOutputText'
+        - $ref: '#/components/schemas/OpenAIResponseContentPartRefusal'
+      discriminator:
+        propertyName: type
+        mapping:
+          output_text: '#/components/schemas/OpenAIResponseContentPartOutputText'
+          refusal: '#/components/schemas/OpenAIResponseContentPartRefusal'
     OpenAIResponseContentPartOutputText:
       type: object
       properties:

@@ -7697,44 +7780,6 @@
       required:
        - prompt
       title: CreatePromptRequest
-    Prompt:
-      type: object
-      properties:
-        prompt:
-          type: string
-          description: >-
-            The system prompt text with variable placeholders. Variables are only
-            supported when using the Responses API.
-        version:
-          type: integer
-          description: >-
-            Version (integer starting at 1, incremented on save)
-        prompt_id:
-          type: string
-          description: >-
-            Unique identifier formatted as 'pmpt_<48-digit-hash>'
-        variables:
-          type: array
-          items:
-            type: string
-          description: >-
-            List of prompt variable names that can be used in the prompt template
-        is_default:
-          type: boolean
-          default: false
-          description: >-
-            Boolean indicating whether this version is the default version for this
-            prompt
-      additionalProperties: false
-      required:
-        - version
-        - prompt_id
-        - variables
-        - is_default
-      title: Prompt
-      description: >-
-        A prompt resource representing a stored OpenAI Compatible prompt template
-        in Llama Stack.
     OpenAIDeleteResponseObject:
       type: object
       properties:

@@ -10069,6 +10114,10 @@
           type: string
           description: >-
            (Optional) ID of the previous response in a conversation
+        prompt:
+          $ref: '#/components/schemas/Prompt'
+          description: >-
+            (Optional) Prompt object with ID, version, and variables
         status:
           type: string
           description: >-
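
Because the YAML spec carries the same schema definitions as the HTML spec, a payload can be sanity-checked locally against it; the snippet below is a sketch that assumes the PyYAML and jsonschema packages and is not part of this commit.

```python
# Sketch: validate a prompt parameter against the schema added in this commit.
import yaml
from jsonschema import ValidationError, validate

with open("docs/static/llama-stack-spec.yaml") as f:
    spec = yaml.safe_load(f)

schema = spec["components"]["schemas"]["OpenAIResponsePromptParam"]

# Accepted: "id" is the only required field.
validate(instance={"id": "pmpt_123", "variables": {"topic": "billing"}}, schema=schema)

# Rejected: a payload without "id" fails validation.
try:
    validate(instance={"version": "1"}, schema=schema)
except ValidationError as err:
    print(f"rejected as expected: {err.message}")
```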

llama_stack/apis/agents/agents.py

Lines changed: 3 additions & 1 deletion
@@ -38,6 +38,7 @@
     OpenAIResponseInputTool,
     OpenAIResponseObject,
    OpenAIResponseObjectStream,
+    OpenAIResponsePromptParam,
     OpenAIResponseText,
 )
 

@@ -711,6 +712,7 @@ async def create_openai_response(
         self,
         input: str | list[OpenAIResponseInput],
         model: str,
+        prompt: OpenAIResponsePromptParam | None = None,
         instructions: str | None = None,
         previous_response_id: str | None = None,
         store: bool | None = True,

@@ -722,9 +724,9 @@
         max_infer_iters: int | None = 10,  # this is an extension to the OpenAI API
     ) -> OpenAIResponseObject | AsyncIterator[OpenAIResponseObjectStream]:
         """Create a new OpenAI response.
-
         :param input: Input message(s) to create the response.
         :param model: The underlying LLM used for completions.
+        :param prompt: Prompt object with ID, version, and variables.
         :param previous_response_id: (Optional) if specified, the new response will be a continuation of the previous response. This can be used to easily fork-off new responses from existing responses.
         :param include: (Optional) Additional fields to include in the response.
         :returns: An OpenAIResponseObject.
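
A rough sketch of a call site for the extended signature follows; the import path, the `agents_impl` object, and the model id are assumptions, and the constructor fields mirror the OpenAIResponsePromptParam schema above.

```python
# Hypothetical caller of the extended create_openai_response signature.
from llama_stack.apis.agents.openai_responses import OpenAIResponsePromptParam  # assumed import path


async def ask_with_stored_prompt(agents_impl):
    prompt_param = OpenAIResponsePromptParam(
        id="pmpt_123",                         # stored prompt template to reference
        version="2",                           # optional; latest version if omitted
        variables={"customer_name": "Alice"},  # substituted into the template text
    )
    return await agents_impl.create_openai_response(
        input="Draft a follow-up email about my last order.",
        model="llama3.2:3b",                   # placeholder model id
        prompt=prompt_param,
    )
```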

0 commit comments
