Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 8 additions & 1 deletion api/OpenAI.net8.0.cs
Original file line number Diff line number Diff line change
Expand Up @@ -1723,6 +1723,8 @@ public class ChatCompletionOptions : IJsonModel<ChatCompletionOptions>, IPersist
[Experimental("OPENAI001")]
public ChatResponseModalities ResponseModalities { get; set; }
[Experimental("OPENAI001")]
public string SafetyIdentifier { get; set; }
[Experimental("OPENAI001")]
public long? Seed { get; set; }
[Experimental("OPENAI001")]
public ChatServiceTier? ServiceTier { get; set; }
Expand Down Expand Up @@ -5598,6 +5600,7 @@ public class OpenAIResponse : IJsonModel<OpenAIResponse>, IPersistableModel<Open
public ResponseIncompleteStatusDetails IncompleteStatusDetails { get; }
public string Instructions { get; }
public int? MaxOutputTokenCount { get; }
public int? MaxToolCallCount { get; }
public IDictionary<string, string> Metadata { get; }
public string Model { get; }
public IList<ResponseItem> OutputItems { get; }
Expand Down Expand Up @@ -5670,7 +5673,7 @@ public class OpenAIResponseClient {
[Experimental("OPENAI001")]
public static class OpenAIResponsesModelFactory {
public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null);
public static OpenAIResponse OpenAIResponse(string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, ResponseError error = null, ResponseTokenUsage usage = null, string endUserId = null, ResponseReasoningOptions reasoningOptions = null, int? maxOutputTokenCount = null, ResponseTextOptions textOptions = null, ResponseTruncationMode? truncationMode = null, ResponseIncompleteStatusDetails incompleteStatusDetails = null, IEnumerable<ResponseItem> outputItems = null, bool parallelToolCallsEnabled = false, ResponseToolChoice toolChoice = null, string model = null, IDictionary<string, string> metadata = null, float? temperature = null, float? topP = null, ResponseServiceTier? serviceTier = null, string previousResponseId = null, bool? backgroundModeEnabled = null, string instructions = null, IEnumerable<ResponseTool> tools = null);
public static OpenAIResponse OpenAIResponse(string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, ResponseError error = null, ResponseTokenUsage usage = null, string endUserId = null, ResponseReasoningOptions reasoningOptions = null, int? maxOutputTokenCount = null, int? maxToolCallCount = null, ResponseTextOptions textOptions = null, ResponseTruncationMode? truncationMode = null, ResponseIncompleteStatusDetails incompleteStatusDetails = null, IEnumerable<ResponseItem> outputItems = null, bool parallelToolCallsEnabled = false, ResponseToolChoice toolChoice = null, string model = null, IDictionary<string, string> metadata = null, float? temperature = null, float? topP = null, ResponseServiceTier? serviceTier = null, string previousResponseId = null, bool? backgroundModeEnabled = null, string instructions = null, IEnumerable<ResponseTool> tools = null);
public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, IEnumerable<ReasoningSummaryPart> summaryParts = null);
public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, string summaryText = null);
public static ReferenceResponseItem ReferenceResponseItem(string id = null);
Expand Down Expand Up @@ -5764,10 +5767,12 @@ public enum ResponseContentPartKind {
[Experimental("OPENAI001")]
public class ResponseCreationOptions : IJsonModel<ResponseCreationOptions>, IPersistableModel<ResponseCreationOptions> {
public bool? BackgroundModeEnabled { get; set; }
public string ConversationId { get; set; }
public string EndUserId { get; set; }
public IList<IncludedResponseProperty> IncludedProperties { get; }
public string Instructions { get; set; }
public int? MaxOutputTokenCount { get; set; }
public int? MaxToolCallCount { get; set; }
public IDictionary<string, string> Metadata { get; }
public bool? ParallelToolCallsEnabled { get; set; }
[Serialization.JsonIgnore]
Expand All @@ -5776,12 +5781,14 @@ public class ResponseCreationOptions : IJsonModel<ResponseCreationOptions>, IPer
public ref JsonPatch Patch { get; }
public string PreviousResponseId { get; set; }
public ResponseReasoningOptions ReasoningOptions { get; set; }
public string SafetyIdentifier { get; set; }
public ResponseServiceTier? ServiceTier { get; set; }
public bool? StoredOutputEnabled { get; set; }
public float? Temperature { get; set; }
public ResponseTextOptions TextOptions { get; set; }
public ResponseToolChoice ToolChoice { get; set; }
public IList<ResponseTool> Tools { get; }
public int? TopLogProbabilityCount { get; set; }
public float? TopP { get; set; }
public ResponseTruncationMode? TruncationMode { get; set; }
protected virtual ResponseCreationOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options);
Expand Down
8 changes: 7 additions & 1 deletion api/OpenAI.netstandard2.0.cs
Original file line number Diff line number Diff line change
Expand Up @@ -1521,6 +1521,7 @@ public class ChatCompletionOptions : IJsonModel<ChatCompletionOptions>, IPersist
public ChatReasoningEffortLevel? ReasoningEffortLevel { get; set; }
public ChatResponseFormat ResponseFormat { get; set; }
public ChatResponseModalities ResponseModalities { get; set; }
public string SafetyIdentifier { get; set; }
public long? Seed { get; set; }
public ChatServiceTier? ServiceTier { get; set; }
public IList<string> StopSequences { get; }
Expand Down Expand Up @@ -4914,6 +4915,7 @@ public class OpenAIResponse : IJsonModel<OpenAIResponse>, IPersistableModel<Open
public ResponseIncompleteStatusDetails IncompleteStatusDetails { get; }
public string Instructions { get; }
public int? MaxOutputTokenCount { get; }
public int? MaxToolCallCount { get; }
public IDictionary<string, string> Metadata { get; }
public string Model { get; }
public IList<ResponseItem> OutputItems { get; }
Expand Down Expand Up @@ -4981,7 +4983,7 @@ public class OpenAIResponseClient {
}
public static class OpenAIResponsesModelFactory {
public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null);
public static OpenAIResponse OpenAIResponse(string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, ResponseError error = null, ResponseTokenUsage usage = null, string endUserId = null, ResponseReasoningOptions reasoningOptions = null, int? maxOutputTokenCount = null, ResponseTextOptions textOptions = null, ResponseTruncationMode? truncationMode = null, ResponseIncompleteStatusDetails incompleteStatusDetails = null, IEnumerable<ResponseItem> outputItems = null, bool parallelToolCallsEnabled = false, ResponseToolChoice toolChoice = null, string model = null, IDictionary<string, string> metadata = null, float? temperature = null, float? topP = null, ResponseServiceTier? serviceTier = null, string previousResponseId = null, bool? backgroundModeEnabled = null, string instructions = null, IEnumerable<ResponseTool> tools = null);
public static OpenAIResponse OpenAIResponse(string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, ResponseError error = null, ResponseTokenUsage usage = null, string endUserId = null, ResponseReasoningOptions reasoningOptions = null, int? maxOutputTokenCount = null, int? maxToolCallCount = null, ResponseTextOptions textOptions = null, ResponseTruncationMode? truncationMode = null, ResponseIncompleteStatusDetails incompleteStatusDetails = null, IEnumerable<ResponseItem> outputItems = null, bool parallelToolCallsEnabled = false, ResponseToolChoice toolChoice = null, string model = null, IDictionary<string, string> metadata = null, float? temperature = null, float? topP = null, ResponseServiceTier? serviceTier = null, string previousResponseId = null, bool? backgroundModeEnabled = null, string instructions = null, IEnumerable<ResponseTool> tools = null);
public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, IEnumerable<ReasoningSummaryPart> summaryParts = null);
public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, string summaryText = null);
public static ReferenceResponseItem ReferenceResponseItem(string id = null);
Expand Down Expand Up @@ -5065,23 +5067,27 @@ public enum ResponseContentPartKind {
}
public class ResponseCreationOptions : IJsonModel<ResponseCreationOptions>, IPersistableModel<ResponseCreationOptions> {
public bool? BackgroundModeEnabled { get; set; }
public string ConversationId { get; set; }
public string EndUserId { get; set; }
public IList<IncludedResponseProperty> IncludedProperties { get; }
public string Instructions { get; set; }
public int? MaxOutputTokenCount { get; set; }
public int? MaxToolCallCount { get; set; }
public IDictionary<string, string> Metadata { get; }
public bool? ParallelToolCallsEnabled { get; set; }
[Serialization.JsonIgnore]
[EditorBrowsable(EditorBrowsableState.Never)]
public ref JsonPatch Patch { get; }
public string PreviousResponseId { get; set; }
public ResponseReasoningOptions ReasoningOptions { get; set; }
public string SafetyIdentifier { get; set; }
public ResponseServiceTier? ServiceTier { get; set; }
public bool? StoredOutputEnabled { get; set; }
public float? Temperature { get; set; }
public ResponseTextOptions TextOptions { get; set; }
public ResponseToolChoice ToolChoice { get; set; }
public IList<ResponseTool> Tools { get; }
public int? TopLogProbabilityCount { get; set; }
public float? TopP { get; set; }
public ResponseTruncationMode? TruncationMode { get; set; }
protected virtual ResponseCreationOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options);
Expand Down
9 changes: 0 additions & 9 deletions specification/base/typespec/chat/models.tsp
Original file line number Diff line number Diff line change
Expand Up @@ -192,15 +192,6 @@ model CreateChatCompletionRequest {
search_context_size?: WebSearchContextSize = "medium";
};

@doc("""
An integer between 0 and 20 specifying the number of most likely tokens to
return at each token position, each with an associated log probability.
`logprobs` must be set to `true` if this parameter is used.
""")
@minValue(0)
@maxValue(20)
top_logprobs?: int32 | null;

// Tool customization: apply a named union type
@doc("""
An object specifying the format that the model must output.
Expand Down
7 changes: 7 additions & 0 deletions specification/base/typespec/common/models.tsp
Original file line number Diff line number Diff line change
Expand Up @@ -249,6 +249,9 @@ model ModelResponsePropertiesForRequest {
@minValue(0)
@maxValue(2)
temperature?: float32 | null = 1;

/** An integer between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability. */
top_logprobs?: int32 | null;

@doc("""
An alternative to sampling with temperature, called nucleus sampling,
Expand All @@ -265,6 +268,10 @@ model ModelResponsePropertiesForRequest {
/** A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more](/docs/guides/safety-best-practices#end-user-ids). */
user?: string;

/** A stable identifier used to help detect users of your application that may be violating OpenAI's usage policies.
The ID should be a string that uniquely identifies each user. We recommend hashing their username or email address, in order to avoid sending us any identifying information. [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). */
safety_identifier?: string;

service_tier?: ServiceTier;
}
model ModelResponsePropertiesForResponse {
Expand Down
22 changes: 22 additions & 0 deletions specification/base/typespec/responses/models.tsp
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,25 @@ model CreateResponse {
* for more information.
*/
stream?: boolean | null = false;

/** The conversation that this response belongs to.
* Items from this conversation are prepended to input_items for this response request.
* Input items and output items from this response are automatically added to this conversation after this response completes. */
conversation?: ConversationParam | null;
}

/**
 * The conversation that this response belongs to. Items from this conversation are
 * prepended to `input_items` for this response request. Input items and output items
 * from this response are automatically added to this conversation after this response
 * completes.
 *
 * Accepts either the conversation ID as a plain string, or a conversation object
 * (`ConversationParam-2`) carrying the ID.
 */
union ConversationParam {
string,
`ConversationParam-2`,
}

/**
 * Object form of the conversation reference accepted by `CreateResponse.conversation`;
 * wraps only the conversation's unique ID.
 */
@summary("Conversation object")
model `ConversationParam-2` {
/** The unique ID of the conversation this response belongs to. */
id: string;
}

model Response {
Expand Down Expand Up @@ -178,6 +197,9 @@ model ResponseProperties {
/** An upper bound for the number of tokens that can be generated for a response, including visible output tokens and [reasoning tokens](/docs/guides/reasoning). */
max_output_tokens?: int32 | null;

/** The maximum number of total calls to built-in tools that can be processed in a response. This maximum number applies across all built-in tool calls, not per individual tool. Any further attempts to call a tool by the model will be ignored. */
max_tool_calls?: int32 | null;

@doc("""
Inserts a system (or developer) message as the first item in the model's context.

Expand Down
2 changes: 2 additions & 0 deletions specification/client/responses.client.tsp
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ using TypeSpec.HttpClient.CSharp;

@@alternateType(CreateResponse.service_tier, DotNetResponseServiceTier);
@@alternateType(Response.service_tier, DotNetResponseServiceTier);
@@alternateType(CreateResponse.conversation, string);
@@clientName(CreateResponse.conversation, "ConversationId");

// ------------ ItemResources ------------
@@usage(ItemResource, Usage.input | Usage.output);
Expand Down
4 changes: 4 additions & 0 deletions src/Custom/Responses/OpenAIResponse.cs
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ public partial class OpenAIResponse
[CodeGenMember("MaxOutputTokens")]
public int? MaxOutputTokenCount { get; }

// CUSTOM: Renamed.
[CodeGenMember("MaxToolCalls")]
public int? MaxToolCallCount { get; }

// CUSTOM: Renamed.
[CodeGenMember("Text")]
public ResponseTextOptions TextOptions { get; }
Expand Down
2 changes: 2 additions & 0 deletions src/Custom/Responses/OpenAIResponsesModelFactory.cs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ public static OpenAIResponse OpenAIResponse(
string endUserId = null,
ResponseReasoningOptions reasoningOptions = null,
int? maxOutputTokenCount = null,
int? maxToolCallCount = null,
ResponseTextOptions textOptions = null,
ResponseTruncationMode? truncationMode = null,
ResponseIncompleteStatusDetails incompleteStatusDetails = null,
Expand Down Expand Up @@ -57,6 +58,7 @@ public static OpenAIResponse OpenAIResponse(
endUserId: endUserId,
reasoningOptions: reasoningOptions,
maxOutputTokenCount: maxOutputTokenCount,
maxToolCallCount: maxToolCallCount,
textOptions: textOptions,
truncationMode: truncationMode,
incompleteStatusDetails: incompleteStatusDetails,
Expand Down
8 changes: 8 additions & 0 deletions src/Custom/Responses/ResponseCreationOptions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,10 @@ public partial class ResponseCreationOptions
[CodeGenMember("MaxOutputTokens")]
public int? MaxOutputTokenCount { get; set; }

// CUSTOM: Renamed.
[CodeGenMember("MaxToolCalls")]
public int? MaxToolCallCount { get; set; }

// CUSTOM: Renamed.
[CodeGenMember("Text")]
public ResponseTextOptions TextOptions { get; set; }
Expand Down Expand Up @@ -73,6 +77,10 @@ public partial class ResponseCreationOptions
[CodeGenMember("Tools")]
public IList<ResponseTool> Tools { get; }

// CUSTOM: Renamed.
[CodeGenMember("TopLogprobs")]
public int? TopLogProbabilityCount { get; set; }

internal ResponseCreationOptions GetClone()
{
ResponseCreationOptions copiedOptions = (ResponseCreationOptions)this.MemberwiseClone();
Expand Down
Loading
Loading