
Commit 72e35ad

stainless-app[bot] authored and meorphis committed
feat(api): add reusable prompt IDs
1 parent 7212e61 commit 72e35ad

34 files changed: +600 −58 lines

.stats.yml

Lines changed: 3 additions & 3 deletions
@@ -1,4 +1,4 @@
 configured_endpoints: 109
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-3ae9c18dd7ccfc3ac5206f24394665f563a19015cfa8847b2801a2694d012abc.yml
-openapi_spec_hash: 48175b03b58805cd5c80793c66fd54e5
-config_hash: 4caff63b74a41f71006987db702f2918
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-9e41d2d5471d2c28bff0d616f4476f5b0e6c541ef4cb51bdaaef5fdf5e13c8b2.yml
+openapi_spec_hash: 86f765e18d00e32cf2ce9db7ab84d946
+config_hash: fd2af1d5eff0995bb7dc02ac9a34851d

lib/openai.rb

Lines changed: 1 addition & 0 deletions
@@ -416,6 +416,7 @@
 require_relative "openai/models/responses/response_output_refusal"
 require_relative "openai/models/responses/response_output_text"
 require_relative "openai/models/responses/response_output_text_annotation_added_event"
+require_relative "openai/models/responses/response_prompt"
 require_relative "openai/models/responses/response_queued_event"
 require_relative "openai/models/responses/response_reasoning_delta_event"
 require_relative "openai/models/responses/response_reasoning_done_event"

lib/openai/models/chat/chat_completion.rb

Lines changed: 1 addition & 0 deletions
@@ -213,6 +213,7 @@ module ServiceTier
 AUTO = :auto
 DEFAULT = :default
 FLEX = :flex
+SCALE = :scale

 # @!method self.values
 # @return [Array<Symbol>]

lib/openai/models/chat/chat_completion_chunk.rb

Lines changed: 1 addition & 0 deletions
@@ -396,6 +396,7 @@ module ServiceTier
 AUTO = :auto
 DEFAULT = :default
 FLEX = :flex
+SCALE = :scale

 # @!method self.values
 # @return [Array<Symbol>]

lib/openai/models/chat/completion_create_params.rb

Lines changed: 1 addition & 0 deletions
@@ -569,6 +569,7 @@ module ServiceTier
 AUTO = :auto
 DEFAULT = :default
 FLEX = :flex
+SCALE = :scale

 # @!method self.values
 # @return [Array<Symbol>]

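The `:scale` member added to the `ServiceTier` enums above can be requested through the chat completion params. A minimal sketch, not taken from this commit: it assumes the SDK's usual `OpenAI::Client` constructor and `client.chat.completions.create` resource method, and the model name is only illustrative.

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Opt into the new `scale` latency tier; :auto, :default and :flex remain valid.
completion = client.chat.completions.create(
  model: "gpt-4.1",
  messages: [{role: "user", content: "Say hello"}],
  service_tier: :scale
)

# The completion echoes the tier that actually served the request.
puts completion.service_tier
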
lib/openai/models/fine_tuning/job_create_params.rb

Lines changed: 4 additions & 2 deletions
@@ -31,7 +31,8 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel
 # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input)
 # format.
 #
-# See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning)
+# See the
+# [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization)
 # for more details.
 #
 # @return [String]
@@ -100,7 +101,8 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel
 # Your dataset must be formatted as a JSONL file. You must upload your file with
 # the purpose `fine-tune`.
 #
-# See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning)
+# See the
+# [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization)
 # for more details.
 #
 # @return [String, nil]

lib/openai/models/image_edit_params.rb

Lines changed: 35 additions & 1 deletion
@@ -61,6 +61,22 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel
 # @return [Integer, nil]
 optional :n, Integer, nil?: true

+# @!attribute output_compression
+# The compression level (0-100%) for the generated images. This parameter is only
+# supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and
+# defaults to 100.
+#
+# @return [Integer, nil]
+optional :output_compression, Integer, nil?: true
+
+# @!attribute output_format
+# The format in which the generated images are returned. This parameter is only
+# supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The
+# default value is `png`.
+#
+# @return [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil]
+optional :output_format, enum: -> { OpenAI::ImageEditParams::OutputFormat }, nil?: true
+
 # @!attribute quality
 # The quality of the image that will be generated. `high`, `medium` and `low` are
 # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality.
@@ -94,7 +110,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel
 # @return [String, nil]
 optional :user, String

-# @!method initialize(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {})
+# @!method initialize(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {})
 # Some parameter documentations has been truncated, see
 # {OpenAI::Models::ImageEditParams} for more details.
 #
@@ -110,6 +126,10 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel
 #
 # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10.
 #
+# @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter
+#
+# @param output_format [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is
+#
 # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are
 #
 # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or `
@@ -174,6 +194,20 @@ module Model
 # @return [Array(String, Symbol, OpenAI::Models::ImageModel)]
 end

+# The format in which the generated images are returned. This parameter is only
+# supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The
+# default value is `png`.
+module OutputFormat
+extend OpenAI::Internal::Type::Enum
+
+PNG = :png
+JPEG = :jpeg
+WEBP = :webp
+
+# @!method self.values
+# @return [Array<Symbol>]
+end
+
 # The quality of the image that will be generated. `high`, `medium` and `low` are
 # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality.
 # Defaults to `auto`.

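The two new image-edit parameters plug straight into the edit call. A rough sketch rather than anything from this commit: `client.images.edit` and passing a `Pathname` for the source image follow this SDK's usual conventions, and the file name and prompt are placeholders.

require "openai"
require "pathname"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# gpt-image-1 only: request lossy webp output at 80% compression.
result = client.images.edit(
  image: Pathname("office.png"),
  prompt: "Add a small potted plant on the desk",
  model: "gpt-image-1",
  output_format: :webp,   # :png (default), :jpeg or :webp
  output_compression: 80  # 0-100; only meaningful for :webp / :jpeg
)

# gpt-image-1 edits are returned base64-encoded.
image_b64 = result.data.first.b64_json
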
lib/openai/models/responses/response.rb

Lines changed: 41 additions & 6 deletions
@@ -32,15 +32,14 @@ class Response < OpenAI::Internal::Type::BaseModel
 required :incomplete_details, -> { OpenAI::Responses::Response::IncompleteDetails }, nil?: true

 # @!attribute instructions
-# Inserts a system (or developer) message as the first item in the model's
-# context.
+# A system (or developer) message inserted into the model's context.
 #
 # When using along with `previous_response_id`, the instructions from a previous
 # response will not be carried over to the next response. This makes it simple to
 # swap out system (or developer) messages in new responses.
 #
-# @return [String, nil]
-required :instructions, String, nil?: true
+# @return [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>, nil]
+required :instructions, union: -> { OpenAI::Responses::Response::Instructions }, nil?: true

 # @!attribute metadata
 # Set of 16 key-value pairs that can be attached to an object. This can be useful
@@ -156,6 +155,13 @@ class Response < OpenAI::Internal::Type::BaseModel
 # @return [String, nil]
 optional :previous_response_id, String, nil?: true

+# @!attribute prompt
+# Reference to a prompt template and its variables.
+# [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts).
+#
+# @return [OpenAI::Models::Responses::ResponsePrompt, nil]
+optional :prompt, -> { OpenAI::Responses::ResponsePrompt }, nil?: true
+
 # @!attribute reasoning
 # **o-series models only**
 #
@@ -231,7 +237,7 @@ class Response < OpenAI::Internal::Type::BaseModel
 # @return [String, nil]
 optional :user, String

-# @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response)
+# @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response)
 # Some parameter documentations has been truncated, see
 # {OpenAI::Models::Responses::Response} for more details.
 #
@@ -243,7 +249,7 @@ class Response < OpenAI::Internal::Type::BaseModel
 #
 # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete.
 #
-# @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context
+# @param instructions [String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>, nil] A system (or developer) message inserted into the model's context.
 #
 # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
 #
@@ -267,6 +273,8 @@ class Response < OpenAI::Internal::Type::BaseModel
 #
 # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to
 #
+# @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables.
+#
 # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only**
 #
 # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is
@@ -310,6 +318,32 @@ module Reason
 end
 end

+# A system (or developer) message inserted into the model's context.
+#
+# When using along with `previous_response_id`, the instructions from a previous
+# response will not be carried over to the next response. This makes it simple to
+# swap out system (or developer) messages in new responses.
+#
+# @see OpenAI::Models::Responses::Response#instructions
+module Instructions
+extend OpenAI::Internal::Type::Union
+
+# A text input to the model, equivalent to a text input with the
+# `developer` role.
+variant String
+
+# A list of one or many input items to the model, containing
+# different content types.
+variant -> { OpenAI::Models::Responses::Response::Instructions::ResponseInputItemArray }
+
+# @!method self.variants
+# @return [Array(String, Array<OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference>)]
+
+# @type [OpenAI::Internal::Type::Converter]
+ResponseInputItemArray =
+OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Responses::ResponseInputItem }]
+end
+
 # How the model should select which tool (or tools) to use when generating a
 # response. See the `tools` parameter to see how to specify which tools the model
 # can call.
@@ -364,6 +398,7 @@ module ServiceTier
 AUTO = :auto
 DEFAULT = :default
 FLEX = :flex
+SCALE = :scale

 # @!method self.values
 # @return [Array<Symbol>]

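Because `Response#instructions` is now the union defined above, code that previously assumed a plain string may need to branch on the returned shape. A small sketch under the same assumptions as the earlier examples; `client.responses.retrieve` is assumed to exist alongside `create`, and the response ID is a placeholder.

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

response = client.responses.retrieve("resp_123")

case response.instructions
when String
  # The pre-existing shape: a single developer/system message.
  puts response.instructions
when Array
  # New shape: a list of ResponseInputItem variants, as enumerated above.
  response.instructions.each { |item| puts item.class }
when nil
  puts "no instructions attached"
end
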
lib/openai/models/responses/response_create_params.rb

Lines changed: 13 additions & 4 deletions
@@ -64,8 +64,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
 nil?: true

 # @!attribute instructions
-# Inserts a system (or developer) message as the first item in the model's
-# context.
+# A system (or developer) message inserted into the model's context.
 #
 # When using along with `previous_response_id`, the instructions from a previous
 # response will not be carried over to the next response. This makes it simple to
@@ -107,6 +106,13 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
 # @return [String, nil]
 optional :previous_response_id, String, nil?: true

+# @!attribute prompt
+# Reference to a prompt template and its variables.
+# [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts).
+#
+# @return [OpenAI::Models::Responses::ResponsePrompt, nil]
+optional :prompt, -> { OpenAI::Responses::ResponsePrompt }, nil?: true
+
 # @!attribute reasoning
 # **o-series models only**
 #
@@ -226,7 +232,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
 # @return [String, nil]
 optional :user, String

-# @!method initialize(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
+# @!method initialize(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
 # Some parameter documentations has been truncated, see
 # {OpenAI::Models::Responses::ResponseCreateParams} for more details.
 #
@@ -238,7 +244,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
 #
 # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] Specify additional output data to include in the model response. Currently
 #
-# @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context
+# @param instructions [String, nil] A system (or developer) message inserted into the model's context.
 #
 # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in
 #
@@ -248,6 +254,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
 #
 # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to
 #
+# @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables.
+#
 # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only**
 #
 # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is
@@ -317,6 +325,7 @@ module ServiceTier
 AUTO = :auto
 DEFAULT = :default
 FLEX = :flex
+SCALE = :scale

 # @!method self.values
 # @return [Array<Symbol>]

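The new `prompt` request parameter is the "reusable prompt IDs" feature named in the commit title: instead of inlining instructions, a request can point at a prompt template saved on the platform. A hedged sketch, assuming `client.responses.create` accepts a hash for the prompt and that `OpenAI::Responses::ResponsePrompt` carries the `id` / `version` / `variables` fields described in the linked reusable-prompts guide; the prompt ID, version, and variable names are placeholders.

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

response = client.responses.create(
  model: "gpt-4.1",
  prompt: {
    id: "pmpt_abc123",                 # ID of a saved prompt template (placeholder)
    version: "2",                      # optionally pin a specific template version
    variables: {customer_name: "Ada"}  # substituted into the template's placeholders
  },
  input: "Draft a short welcome email."
)

response.output.each { |item| puts item.type }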