From c3fff0f5d682ab8f2ad3acc90cca98cd15940e7a Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Thu, 30 Oct 2025 17:04:14 -0500 Subject: [PATCH 01/15] CreateResponse --- .../CreateResponseOptions.Serialization.cs | 604 ++++++++++++++ src/Custom/Responses/CreateResponseOptions.cs | 129 +++ .../Responses/Includable.Serialization.cs | 42 + src/Custom/Responses/Includable.cs | 43 + .../OpenAIResponseClient.Protocol.cs | 28 + src/Custom/Responses/OpenAIResponseClient.cs | 16 + .../Responses/ResponseResult.Serialization.cs | 734 ++++++++++++++++++ src/Custom/Responses/ResponseResult.cs | 120 +++ .../OpenAIResponseClient.RestClient.cs | 29 - 9 files changed, 1716 insertions(+), 29 deletions(-) create mode 100644 src/Custom/Responses/CreateResponseOptions.Serialization.cs create mode 100644 src/Custom/Responses/CreateResponseOptions.cs create mode 100644 src/Custom/Responses/Includable.Serialization.cs create mode 100644 src/Custom/Responses/Includable.cs create mode 100644 src/Custom/Responses/ResponseResult.Serialization.cs create mode 100644 src/Custom/Responses/ResponseResult.cs diff --git a/src/Custom/Responses/CreateResponseOptions.Serialization.cs b/src/Custom/Responses/CreateResponseOptions.Serialization.cs new file mode 100644 index 000000000..f67adfe24 --- /dev/null +++ b/src/Custom/Responses/CreateResponseOptions.Serialization.cs @@ -0,0 +1,604 @@ +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using OpenAI.Responses; + +namespace OpenAI.Responses +{ + public partial class CreateResponseOptions : IJsonModel + { + internal CreateResponseOptions() : this(null, default, default, null, default, null, default, null, default, default, null, null, null, null, default, null, null, default, default, default, default) + { + } + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { +#pragma warning disable SCME0001 // Type is 
for evaluation purposes only and is subject to change or removal in future updates. + if (Patch.Contains("$"u8)) + { + writer.WriteRawValue(Patch.GetJson("$"u8)); + return; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support writing '{format}' format."); + } +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + if (Optional.IsCollectionDefined(Metadata) && !Patch.Contains("$.metadata"u8)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartObject(); +#if NET8_0_OR_GREATER + global::System.Span buffer = stackalloc byte[256]; +#endif + foreach (var item in Metadata) + { +#if NET8_0_OR_GREATER + int bytesWritten = global::System.Text.Encoding.UTF8.GetBytes(item.Key.AsSpan(), buffer); + bool patchContains = (bytesWritten == 256) ? 
Patch.Contains("$.metadata"u8, global::System.Text.Encoding.UTF8.GetBytes(item.Key)) : Patch.Contains("$.metadata"u8, buffer.Slice(0, bytesWritten)); +#else + bool patchContains = Patch.Contains("$.metadata"u8, Encoding.UTF8.GetBytes(item.Key)); +#endif + if (!patchContains) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item.Value); + } + } + + Patch.WriteTo(writer, "$.metadata"u8); + writer.WriteEndObject(); + } + if (Optional.IsDefined(Temperature) && !Patch.Contains("$.temperature"u8)) + { + writer.WritePropertyName("temperature"u8); + writer.WriteNumberValue(Temperature.Value); + } + if (Optional.IsDefined(TopP) && !Patch.Contains("$.top_p"u8)) + { + writer.WritePropertyName("top_p"u8); + writer.WriteNumberValue(TopP.Value); + } + if (Optional.IsDefined(User) && !Patch.Contains("$.user"u8)) + { + writer.WritePropertyName("user"u8); + writer.WriteStringValue(User); + } + if (Optional.IsDefined(ServiceTier) && !Patch.Contains("$.service_tier"u8)) + { + writer.WritePropertyName("service_tier"u8); + writer.WriteStringValue(ServiceTier.Value.ToString()); + } + if (Optional.IsDefined(PreviousResponseId) && !Patch.Contains("$.previous_response_id"u8)) + { + writer.WritePropertyName("previous_response_id"u8); + writer.WriteStringValue(PreviousResponseId); + } + if (Optional.IsDefined(Model) && !Patch.Contains("$.model"u8)) + { + writer.WritePropertyName("model"u8); + writer.WriteStringValue(Model.Value.ToString()); + } + if (Optional.IsDefined(Reasoning) && !Patch.Contains("$.reasoning"u8)) + { + writer.WritePropertyName("reasoning"u8); + writer.WriteObjectValue(Reasoning, options); + } + if (Optional.IsDefined(Background) && !Patch.Contains("$.background"u8)) + { + writer.WritePropertyName("background"u8); + writer.WriteBooleanValue(Background.Value); + } + if (Optional.IsDefined(MaxOutputTokens) && !Patch.Contains("$.max_output_tokens"u8)) + { + 
writer.WritePropertyName("max_output_tokens"u8); + writer.WriteNumberValue(MaxOutputTokens.Value); + } + if (Optional.IsDefined(Instructions) && !Patch.Contains("$.instructions"u8)) + { + writer.WritePropertyName("instructions"u8); + writer.WriteStringValue(Instructions); + } + if (Optional.IsDefined(Text) && !Patch.Contains("$.text"u8)) + { + writer.WritePropertyName("text"u8); + writer.WriteObjectValue(Text, options); + } + if (Patch.Contains("$.tools"u8)) + { + if (!Patch.IsRemoved("$.tools"u8)) + { + writer.WritePropertyName("tools"u8); + writer.WriteRawValue(Patch.GetJson("$.tools"u8)); + } + } + else if (Optional.IsCollectionDefined(Tools)) + { + writer.WritePropertyName("tools"u8); + writer.WriteStartArray(); + for (int i = 0; i < Tools.Count; i++) + { + if (Tools[i].Patch.IsRemoved("$"u8)) + { + continue; + } + writer.WriteObjectValue(Tools[i], options); + } + Patch.WriteTo(writer, "$.tools"u8); + writer.WriteEndArray(); + } + if (Optional.IsDefined(ToolChoice) && !Patch.Contains("$.tool_choice"u8)) + { + writer.WritePropertyName("tool_choice"u8); + writer.WriteObjectValue(ToolChoice, options); + } + if (Optional.IsDefined(Truncation) && !Patch.Contains("$.truncation"u8)) + { + writer.WritePropertyName("truncation"u8); + writer.WriteStringValue(Truncation.Value.ToString()); + } + if (Patch.Contains("$.input"u8)) + { + if (!Patch.IsRemoved("$.input"u8)) + { + writer.WritePropertyName("input"u8); + writer.WriteRawValue(Patch.GetJson("$.input"u8)); + } + } + else + { + writer.WritePropertyName("input"u8); + writer.WriteStartArray(); + for (int i = 0; i < Input.Count; i++) + { + if (Input[i].Patch.IsRemoved("$"u8)) + { + continue; + } + writer.WriteObjectValue(Input[i], options); + } + Patch.WriteTo(writer, "$.input"u8); + writer.WriteEndArray(); + } + if (Patch.Contains("$.include"u8)) + { + if (!Patch.IsRemoved("$.include"u8)) + { + writer.WritePropertyName("include"u8); + writer.WriteRawValue(Patch.GetJson("$.include"u8)); + } + } + else if 
(Optional.IsCollectionDefined(Include)) + { + writer.WritePropertyName("include"u8); + writer.WriteStartArray(); + for (int i = 0; i < Include.Count; i++) + { + if (Patch.IsRemoved(Encoding.UTF8.GetBytes($"$.include[{i}]"))) + { + continue; + } + writer.WriteStringValue(Include[i].ToSerialString()); + } + Patch.WriteTo(writer, "$.include"u8); + writer.WriteEndArray(); + } + if (Optional.IsDefined(ParallelToolCalls) && !Patch.Contains("$.parallel_tool_calls"u8)) + { + writer.WritePropertyName("parallel_tool_calls"u8); + writer.WriteBooleanValue(ParallelToolCalls.Value); + } + if (Optional.IsDefined(Store) && !Patch.Contains("$.store"u8)) + { + writer.WritePropertyName("store"u8); + writer.WriteBooleanValue(Store.Value); + } + if (Optional.IsDefined(Stream) && !Patch.Contains("$.stream"u8)) + { + writer.WritePropertyName("stream"u8); + writer.WriteBooleanValue(Stream.Value); + } + + Patch.WriteTo(writer); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + } + + CreateResponseOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateResponseOptions(document.RootElement, null, options); + } + + internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonElement element, BinaryData data, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary metadata = default; + float? temperature = default; + float? topP = default; + string user = default; + ResponseServiceTier? serviceTier = default; + string previousResponseId = default; + InternalModelIdsResponses? model = default; + ResponseReasoningOptions reasoning = default; + bool? background = default; + int? maxOutputTokens = default; + string instructions = default; + ResponseTextOptions text = default; + IList tools = default; + ResponseToolChoice toolChoice = default; + ResponseTruncationMode? truncation = default; + IList input = default; + IList include = default; + bool? parallelToolCalls = default; + bool? store = default; + bool? stream = default; +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
+ foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("metadata"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, prop0.Value.GetString()); + } + } + metadata = dictionary; + continue; + } + if (prop.NameEquals("temperature"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + temperature = null; + continue; + } + temperature = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("top_p"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + topP = null; + continue; + } + topP = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("user"u8)) + { + user = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("service_tier"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + serviceTier = new ResponseServiceTier(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("previous_response_id"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + previousResponseId = null; + continue; + } + previousResponseId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("model"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + model = new InternalModelIdsResponses(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("reasoning"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + reasoning = null; + continue; + } + reasoning = ResponseReasoningOptions.DeserializeResponseReasoningOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("background"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + background = null; + continue; + } + background = prop.Value.GetBoolean(); + continue; + } + if 
(prop.NameEquals("max_output_tokens"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + maxOutputTokens = null; + continue; + } + maxOutputTokens = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("instructions"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + instructions = null; + continue; + } + instructions = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("text"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + text = ResponseTextOptions.DeserializeResponseTextOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("tools"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ResponseTool.DeserializeResponseTool(item, item.GetUtf8Bytes(), options)); + } + tools = array; + continue; + } + if (prop.NameEquals("tool_choice"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + toolChoice = ResponseToolChoice.DeserializeResponseToolChoice(prop.Value, options); + continue; + } + if (prop.NameEquals("truncation"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + truncation = null; + continue; + } + truncation = new ResponseTruncationMode(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("input"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ResponseItem.DeserializeResponseItem(item, item.GetUtf8Bytes(), options)); + } + input = array; + continue; + } + if (prop.NameEquals("include"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(item.GetString().ToIncludable()); + } + include = array; + continue; + } + if (prop.NameEquals("parallel_tool_calls"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) 
+ { + parallelToolCalls = null; + continue; + } + parallelToolCalls = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("store"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + store = null; + continue; + } + store = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("stream"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + stream = null; + continue; + } + stream = prop.Value.GetBoolean(); + continue; + } + patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); + } + return new CreateResponseOptions( + metadata ?? new ChangeTrackingDictionary(), + temperature, + topP, + user, + serviceTier, + previousResponseId, + model, + reasoning, + background, + maxOutputTokens, + instructions, + text, + tools ?? new ChangeTrackingList(), + toolChoice, + truncation, + input, + include ?? new ChangeTrackingList(), + parallelToolCalls, + store, + stream, + patch); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, OpenAIContext.Default); + default: + throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support writing '{options.Format}' format."); + } + } + + CreateResponseOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + protected virtual CreateResponseOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data)) + { + return DeserializeCreateResponseOptions(document.RootElement, data, options); + } + default: + throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValue value) + { + ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); + value = default; + + if (local.StartsWith("reasoning"u8)) + { + return Reasoning.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); + } + if (local.StartsWith("text"u8)) + { + return Text.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("text"u8.Length)], out value); + } + if (local.StartsWith("tools"u8)) + { + int propertyLength = "tools"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + return Tools[index].Patch.TryGetEncodedValue([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], out value); + } + return false; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue value) + { + ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); + + if (local.StartsWith("reasoning"u8)) + { + Reasoning.Patch.Set([.. "$"u8, .. 
local.Slice("reasoning"u8.Length)], value); + return true; + } + if (local.StartsWith("text"u8)) + { + Text.Patch.Set([.. "$"u8, .. local.Slice("text"u8.Length)], value); + return true; + } + if (local.StartsWith("tools"u8)) + { + int propertyLength = "tools"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + Tools[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); + return true; + } + return false; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions) + { + if (createResponseOptions == null) + { + return null; + } + return BinaryContent.Create(createResponseOptions, ModelSerializationExtensions.WireOptions); + } + } +} diff --git a/src/Custom/Responses/CreateResponseOptions.cs b/src/Custom/Responses/CreateResponseOptions.cs new file mode 100644 index 000000000..b516b6a89 --- /dev/null +++ b/src/Custom/Responses/CreateResponseOptions.cs @@ -0,0 +1,129 @@ +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class CreateResponseOptions + { + [Experimental("SCME0001")] + private JsonPatch _patch; + + public CreateResponseOptions(List input) + { + Argument.AssertNotNull(input, nameof(input)); + + Metadata = new ChangeTrackingDictionary(); + Tools = new ChangeTrackingList(); + Input = input; + Include = new ChangeTrackingList(); + } + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + internal CreateResponseOptions(IDictionary metadata, float? temperature, float? 
topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, InternalModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncation, IList input, IList include, bool? parallelToolCalls, bool? store, bool? stream, in JsonPatch patch) + { + // Plugin customization: ensure initialization of collections + Metadata = metadata ?? new ChangeTrackingDictionary(); + Temperature = temperature; + TopP = topP; + User = user; + ServiceTier = serviceTier; + PreviousResponseId = previousResponseId; + Model = model; + Reasoning = reasoning; + Background = background; + MaxOutputTokens = maxOutputTokens; + Instructions = instructions; + Text = text; + Tools = tools ?? new ChangeTrackingList(); + ToolChoice = toolChoice; + Truncation = truncation; + Input = input; + Include = include ?? new ChangeTrackingList(); + ParallelToolCalls = parallelToolCalls; + Store = store; + Stream = stream; + _patch = patch; + _patch.SetPropagators(PropagateSet, PropagateGet); + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + [EditorBrowsable(EditorBrowsableState.Never)] + [Experimental("SCME0001")] + public ref JsonPatch Patch => ref _patch; + + public IDictionary Metadata { get; } + + public float? Temperature { get; set; } + + public float? TopP { get; set; } + + public string User { get; set; } + + public ResponseServiceTier? ServiceTier { get; set; } + + public string PreviousResponseId { get; set; } + + internal InternalModelIdsResponses? Model { get; set; } + + public ResponseReasoningOptions Reasoning { get; set; } + + public bool? Background { get; set; } + + public int? 
MaxOutputTokens { get; set; } + + public string Instructions { get; set; } + + public ResponseTextOptions Text { get; set; } + + public IList Tools { get; } + + public ResponseToolChoice ToolChoice { get; set; } + + public ResponseTruncationMode? Truncation { get; set; } + + public IList Input { get; } + + public IList Include { get; set; } + + public bool? ParallelToolCalls { get; set; } + + public bool? Store { get; set; } + + public bool? Stream { get; set; } + + public static CreateResponseOptions Create(IEnumerable inputItems, OpenAIResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false) + { + Argument.AssertNotNull(inputItems, nameof(inputItems)); + options ??= new(); + var responseCreationOptions = client.CreatePerCallOptions(options, inputItems, isStreaming); + + return new CreateResponseOptions( + responseCreationOptions.Metadata, + responseCreationOptions.Temperature, + responseCreationOptions.TopP, + responseCreationOptions.EndUserId, + responseCreationOptions.ServiceTier, + responseCreationOptions.PreviousResponseId, + responseCreationOptions.Model, + responseCreationOptions.ReasoningOptions, + responseCreationOptions.BackgroundModeEnabled, + responseCreationOptions.MaxOutputTokenCount, + responseCreationOptions.Instructions, + responseCreationOptions.TextOptions, + responseCreationOptions.Tools, + responseCreationOptions.ToolChoice, + responseCreationOptions.TruncationMode, + inputItems.ToList(), + [.. 
responseCreationOptions.Include.Select(x => x.ToIncludable())], + responseCreationOptions.ParallelToolCallsEnabled, + responseCreationOptions.StoredOutputEnabled, + responseCreationOptions.Stream, + new JsonPatch()); + } + } +} diff --git a/src/Custom/Responses/Includable.Serialization.cs b/src/Custom/Responses/Includable.Serialization.cs new file mode 100644 index 000000000..3edc8f6aa --- /dev/null +++ b/src/Custom/Responses/Includable.Serialization.cs @@ -0,0 +1,42 @@ +using System; + +namespace OpenAI.Responses +{ + internal static partial class IncludableExtensions + { + public static string ToSerialString(this Includable value) => value switch + { + Includable.FileSearchCallResults => "file_search_call.results", + Includable.MessageInputImageImageUrl => "message.input_image.image_url", + Includable.ComputerCallOutputOutputImageUrl => "computer_call_output.output.image_url", + Includable.ReasoningEncryptedContent => "reasoning.encrypted_content", + Includable.CodeInterpreterCallOutputs => "code_interpreter_call.outputs", + _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown Includable value.") + }; + + public static Includable ToIncludable(this string value) + { + if (StringComparer.OrdinalIgnoreCase.Equals(value, "file_search_call.results")) + { + return Includable.FileSearchCallResults; + } + if (StringComparer.OrdinalIgnoreCase.Equals(value, "message.input_image.image_url")) + { + return Includable.MessageInputImageImageUrl; + } + if (StringComparer.OrdinalIgnoreCase.Equals(value, "computer_call_output.output.image_url")) + { + return Includable.ComputerCallOutputOutputImageUrl; + } + if (StringComparer.OrdinalIgnoreCase.Equals(value, "reasoning.encrypted_content")) + { + return Includable.ReasoningEncryptedContent; + } + if (StringComparer.OrdinalIgnoreCase.Equals(value, "code_interpreter_call.outputs")) + { + return Includable.CodeInterpreterCallOutputs; + } + throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown 
Includable value."); + } + } +} diff --git a/src/Custom/Responses/Includable.cs b/src/Custom/Responses/Includable.cs new file mode 100644 index 000000000..ae7c4b568 --- /dev/null +++ b/src/Custom/Responses/Includable.cs @@ -0,0 +1,43 @@ +using System; +using System.Diagnostics.CodeAnalysis; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public enum Includable + { + FileSearchCallResults, + MessageInputImageImageUrl, + ComputerCallOutputOutputImageUrl, + ReasoningEncryptedContent, + CodeInterpreterCallOutputs + } + + internal static partial class IncludableExtensions + { + internal static Includable ToIncludable(this InternalIncludable internalIncludable) + { + if (internalIncludable == InternalIncludable.FileSearchCallResults) + { + return Includable.FileSearchCallResults; + } + if (internalIncludable == InternalIncludable.MessageInputImageImageUrl) + { + return Includable.MessageInputImageImageUrl; + } + if (internalIncludable == InternalIncludable.ComputerCallOutputOutputImageUrl) + { + return Includable.ComputerCallOutputOutputImageUrl; + } + if (internalIncludable == InternalIncludable.ReasoningEncryptedContent) + { + return Includable.ReasoningEncryptedContent; + } + if (internalIncludable == InternalIncludable.CodeInterpreterCallOutputs) + { + return Includable.CodeInterpreterCallOutputs; + } + throw new ArgumentException($"Unknown InternalIncludable value: {internalIncludable}", nameof(internalIncludable)); + } + } +} diff --git a/src/Custom/Responses/OpenAIResponseClient.Protocol.cs b/src/Custom/Responses/OpenAIResponseClient.Protocol.cs index 234711a31..3ff7e5340 100644 --- a/src/Custom/Responses/OpenAIResponseClient.Protocol.cs +++ b/src/Custom/Responses/OpenAIResponseClient.Protocol.cs @@ -32,4 +32,32 @@ public virtual ClientResult GetResponse(string responseId, bool? stream, int? 
st PipelineResponse protocolResponse = Pipeline.ProcessMessage(message, options); return ClientResult.FromResponse(protocolResponse); } + + internal virtual PipelineMessage CreateGetResponseRequest(string responseId, IEnumerable includables, bool? stream, int? startingAfter, RequestOptions options) + { + ClientUriBuilder uri = new ClientUriBuilder(); + uri.Reset(_endpoint); + uri.AppendPath("/responses/", false); + uri.AppendPath(responseId, true); + if (includables != null && !(includables is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + foreach (var @param in includables) + { + uri.AppendQuery("include[]", @param.ToSerialString(), true); + } + } + if (stream != null) + { + uri.AppendQuery("stream", TypeFormatters.ConvertToString(stream), true); + } + if (startingAfter != null) + { + uri.AppendQuery("starting_after", TypeFormatters.ConvertToString(startingAfter), true); + } + PipelineMessage message = Pipeline.CreateMessage(uri.ToUri(), "GET", PipelineMessageClassifier200); + PipelineRequest request = message.Request; + request.Headers.Set("Accept", "application/json, text/event-stream"); + message.Apply(options); + return message; + } } \ No newline at end of file diff --git a/src/Custom/Responses/OpenAIResponseClient.cs b/src/Custom/Responses/OpenAIResponseClient.cs index 57183d55c..49c98d662 100644 --- a/src/Custom/Responses/OpenAIResponseClient.cs +++ b/src/Custom/Responses/OpenAIResponseClient.cs @@ -176,6 +176,22 @@ public virtual ClientResult CreateResponse(string userInputText, cancellationToken); } + public virtual ClientResult CreateResponse(CreateResponseOptions requestBody, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(requestBody, nameof(requestBody)); + + ClientResult result = this.CreateResponse(requestBody, cancellationToken.CanBeCanceled ? 
new RequestOptions { CancellationToken = cancellationToken } : null); + return ClientResult.FromValue((ResponseResult)result.GetRawResponse().Content, result.GetRawResponse()); + } + + public virtual async Task> CreateResponseAsync(CreateResponseOptions requestBody, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(requestBody, nameof(requestBody)); + + ClientResult result = await this.CreateResponseAsync(requestBody, cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null).ConfigureAwait(false); + return ClientResult.FromValue((ResponseResult)result.GetRawResponse().Content, result.GetRawResponse()); + } + public virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { return CreateResponseStreamingAsync(inputItems, options, cancellationToken.ToRequestOptions(streaming: true)); diff --git a/src/Custom/Responses/ResponseResult.Serialization.cs b/src/Custom/Responses/ResponseResult.Serialization.cs new file mode 100644 index 000000000..760d050d2 --- /dev/null +++ b/src/Custom/Responses/ResponseResult.Serialization.cs @@ -0,0 +1,734 @@ +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; + +namespace OpenAI.Responses +{ + public partial class ResponseResult : IJsonModel + { + internal ResponseResult() : this(null, default, default, null, default, null, default, null, default, default, null, null, null, null, default, null, null, default, default, null, null, null, null, null, default, default) + { + } + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
+ if (Patch.Contains("$"u8)) + { + writer.WriteRawValue(Patch.GetJson("$"u8)); + return; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ResponseResult)} does not support writing '{format}' format."); + } +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + if (Optional.IsCollectionDefined(Metadata) && !Patch.Contains("$.metadata"u8)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartObject(); +#if NET8_0_OR_GREATER + global::System.Span buffer = stackalloc byte[256]; +#endif + foreach (var item in Metadata) + { +#if NET8_0_OR_GREATER + int bytesWritten = global::System.Text.Encoding.UTF8.GetBytes(item.Key.AsSpan(), buffer); + bool patchContains = (bytesWritten == 256) ? 
Patch.Contains("$.metadata"u8, global::System.Text.Encoding.UTF8.GetBytes(item.Key)) : Patch.Contains("$.metadata"u8, buffer.Slice(0, bytesWritten)); +#else + bool patchContains = Patch.Contains("$.metadata"u8, Encoding.UTF8.GetBytes(item.Key)); +#endif + if (!patchContains) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item.Value); + } + } + + Patch.WriteTo(writer, "$.metadata"u8); + writer.WriteEndObject(); + } + else + { + writer.WriteNull("metadata"u8); + } + if (Optional.IsDefined(Temperature) && !Patch.Contains("$.temperature"u8)) + { + writer.WritePropertyName("temperature"u8); + writer.WriteNumberValue(Temperature.Value); + } + else + { + writer.WriteNull("temperature"u8); + } + if (Optional.IsDefined(TopP) && !Patch.Contains("$.top_p"u8)) + { + writer.WritePropertyName("top_p"u8); + writer.WriteNumberValue(TopP.Value); + } + else + { + writer.WriteNull("top_p"u8); + } + if (Optional.IsDefined(User) && !Patch.Contains("$.user"u8)) + { + writer.WritePropertyName("user"u8); + writer.WriteStringValue(User); + } + else + { + writer.WriteNull("user"u8); + } + if (Optional.IsDefined(ServiceTier) && !Patch.Contains("$.service_tier"u8)) + { + writer.WritePropertyName("service_tier"u8); + writer.WriteStringValue(ServiceTier.Value.ToString()); + } + if (Optional.IsDefined(PreviousResponseId) && !Patch.Contains("$.previous_response_id"u8)) + { + writer.WritePropertyName("previous_response_id"u8); + writer.WriteStringValue(PreviousResponseId); + } + if (Optional.IsDefined(Model) && !Patch.Contains("$.model"u8)) + { + writer.WritePropertyName("model"u8); + writer.WriteStringValue(Model.Value.ToString()); + } + if (Optional.IsDefined(Reasoning) && !Patch.Contains("$.reasoning"u8)) + { + writer.WritePropertyName("reasoning"u8); + writer.WriteObjectValue(Reasoning, options); + } + if (Optional.IsDefined(Background) && !Patch.Contains("$.background"u8)) + { + 
writer.WritePropertyName("background"u8); + writer.WriteBooleanValue(Background.Value); + } + if (Optional.IsDefined(MaxOutputTokens) && !Patch.Contains("$.max_output_tokens"u8)) + { + writer.WritePropertyName("max_output_tokens"u8); + writer.WriteNumberValue(MaxOutputTokens.Value); + } + if (Optional.IsDefined(Instructions) && !Patch.Contains("$.instructions"u8)) + { + writer.WritePropertyName("instructions"u8); + writer.WriteStringValue(Instructions); + } + if (Optional.IsDefined(Text) && !Patch.Contains("$.text"u8)) + { + writer.WritePropertyName("text"u8); + writer.WriteObjectValue(Text, options); + } + if (Patch.Contains("$.tools"u8)) + { + if (!Patch.IsRemoved("$.tools"u8)) + { + writer.WritePropertyName("tools"u8); + writer.WriteRawValue(Patch.GetJson("$.tools"u8)); + } + } + else if (Optional.IsCollectionDefined(Tools)) + { + writer.WritePropertyName("tools"u8); + writer.WriteStartArray(); + for (int i = 0; i < Tools.Count; i++) + { + if (Tools[i].Patch.IsRemoved("$"u8)) + { + continue; + } + writer.WriteObjectValue(Tools[i], options); + } + Patch.WriteTo(writer, "$.tools"u8); + writer.WriteEndArray(); + } + if (Optional.IsDefined(ToolChoice) && !Patch.Contains("$.tool_choice"u8)) + { + writer.WritePropertyName("tool_choice"u8); +#if NET6_0_OR_GREATER + writer.WriteRawValue(ToolChoice); +#else + using (JsonDocument document = JsonDocument.Parse(ToolChoice)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + if (Optional.IsDefined(Truncation) && !Patch.Contains("$.truncation"u8)) + { + writer.WritePropertyName("truncation"u8); + writer.WriteStringValue(Truncation.Value.ToString()); + } + if (!Patch.Contains("$.id"u8)) + { + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + } + if (!Patch.Contains("$.object"u8)) + { + writer.WritePropertyName("object"u8); + writer.WriteStringValue(Object); + } + if (Optional.IsDefined(Status) && !Patch.Contains("$.status"u8)) + { + writer.WritePropertyName("status"u8); + 
writer.WriteStringValue(Status.Value.ToSerialString()); + } + if (!Patch.Contains("$.created_at"u8)) + { + writer.WritePropertyName("created_at"u8); + writer.WriteNumberValue(CreatedAt, "U"); + } + if (Optional.IsDefined(Error) && !Patch.Contains("$.error"u8)) + { + writer.WritePropertyName("error"u8); + writer.WriteObjectValue(Error, options); + } + else + { + writer.WriteNull("error"u8); + } + if (Optional.IsDefined(IncompleteDetails) && !Patch.Contains("$.incomplete_details"u8)) + { + writer.WritePropertyName("incomplete_details"u8); + writer.WriteObjectValue(IncompleteDetails, options); + } + else + { + writer.WriteNull("incomplete_details"u8); + } + if (Patch.Contains("$.output"u8)) + { + if (!Patch.IsRemoved("$.output"u8)) + { + writer.WritePropertyName("output"u8); + writer.WriteRawValue(Patch.GetJson("$.output"u8)); + } + } + else + { + writer.WritePropertyName("output"u8); + writer.WriteStartArray(); + for (int i = 0; i < Output.Count; i++) + { + if (Output[i].Patch.IsRemoved("$"u8)) + { + continue; + } + writer.WriteObjectValue(Output[i], options); + } + Patch.WriteTo(writer, "$.output"u8); + writer.WriteEndArray(); + } + if (Optional.IsDefined(OutputText) && !Patch.Contains("$.output_text"u8)) + { + writer.WritePropertyName("output_text"u8); + writer.WriteStringValue(OutputText); + } + if (Optional.IsDefined(Usage) && !Patch.Contains("$.usage"u8)) + { + writer.WritePropertyName("usage"u8); + writer.WriteObjectValue(Usage, options); + } + if (!Patch.Contains("$.parallel_tool_calls"u8)) + { + writer.WritePropertyName("parallel_tool_calls"u8); + writer.WriteBooleanValue(ParallelToolCalls); + } + + Patch.WriteTo(writer); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
+ } + + ResponseResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + protected virtual ResponseResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ResponseResult)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeResponseResult(document.RootElement, null, options); + } + + internal static ResponseResult DeserializeResponseResult(JsonElement element, BinaryData data, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary metadata = default; + float? temperature = default; + float? topP = default; + string user = default; + ResponseServiceTier? serviceTier = default; + string previousResponseId = default; + InternalModelIdsResponses? model = default; + ResponseReasoningOptions reasoning = default; + bool? background = default; + int? maxOutputTokens = default; + string instructions = default; + ResponseTextOptions text = default; + IList tools = default; + BinaryData toolChoice = default; + ResponseTruncationMode? truncation = default; + string id = default; + string @object = default; + ResponseStatus? status = default; + DateTimeOffset createdAt = default; + ResponseError error = default; + ResponseIncompleteStatusDetails incompleteDetails = default; + IList output = default; + string outputText = default; + ResponseTokenUsage usage = default; + bool parallelToolCalls = default; +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + JsonPatch patch = new JsonPatch(data is null ? 
ReadOnlyMemory.Empty : data.ToMemory()); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("metadata"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + metadata = new ChangeTrackingDictionary(); + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, prop0.Value.GetString()); + } + } + metadata = dictionary; + continue; + } + if (prop.NameEquals("temperature"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + temperature = null; + continue; + } + temperature = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("top_p"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + topP = null; + continue; + } + topP = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("user"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + user = null; + continue; + } + user = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("service_tier"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + serviceTier = new ResponseServiceTier(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("previous_response_id"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + previousResponseId = null; + continue; + } + previousResponseId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("model"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + model = new InternalModelIdsResponses(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("reasoning"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + reasoning = null; + continue; + } + reasoning = 
ResponseReasoningOptions.DeserializeResponseReasoningOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("background"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + background = null; + continue; + } + background = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("max_output_tokens"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + maxOutputTokens = null; + continue; + } + maxOutputTokens = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("instructions"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + instructions = null; + continue; + } + instructions = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("text"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + text = ResponseTextOptions.DeserializeResponseTextOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("tools"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ResponseTool.DeserializeResponseTool(item, item.GetUtf8Bytes(), options)); + } + tools = array; + continue; + } + if (prop.NameEquals("tool_choice"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + toolChoice = BinaryData.FromString(prop.Value.GetRawText()); + continue; + } + if (prop.NameEquals("truncation"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + truncation = null; + continue; + } + truncation = new ResponseTruncationMode(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("id"u8)) + { + id = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("object"u8)) + { + @object = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("status"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + status = 
prop.Value.GetString().ToResponseStatus(); + continue; + } + if (prop.NameEquals("created_at"u8)) + { + createdAt = DateTimeOffset.FromUnixTimeSeconds(prop.Value.GetInt64()); + continue; + } + if (prop.NameEquals("error"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + error = null; + continue; + } + error = ResponseError.DeserializeResponseError(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("incomplete_details"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + incompleteDetails = null; + continue; + } + incompleteDetails = ResponseIncompleteStatusDetails.DeserializeResponseIncompleteStatusDetails(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("output"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ResponseItem.DeserializeResponseItem(item, item.GetUtf8Bytes(), options)); + } + output = array; + continue; + } + if (prop.NameEquals("output_text"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + outputText = null; + continue; + } + outputText = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("usage"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + usage = ResponseTokenUsage.DeserializeResponseTokenUsage(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("parallel_tool_calls"u8)) + { + parallelToolCalls = prop.Value.GetBoolean(); + continue; + } + patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); + } + return new ResponseResult( + metadata, + temperature, + topP, + user, + serviceTier, + previousResponseId, + model, + reasoning, + background, + maxOutputTokens, + instructions, + text, + tools ?? 
new ChangeTrackingList(), + toolChoice, + truncation, + id, + @object, + status, + createdAt, + error, + incompleteDetails, + output, + outputText, + usage, + parallelToolCalls, + patch); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, OpenAIContext.Default); + default: + throw new FormatException($"The model {nameof(ResponseResult)} does not support writing '{options.Format}' format."); + } + } + + ResponseResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + protected virtual ResponseResult PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data)) + { + return DeserializeResponseResult(document.RootElement, data, options); + } + default: + throw new FormatException($"The model {nameof(ResponseResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + public static explicit operator ResponseResult(ClientResult result) + { + using PipelineResponse ResponseResult = result.GetRawResponse(); + BinaryData data = ResponseResult.Content; + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeResponseResult(document.RootElement, data, ModelSerializationExtensions.WireOptions); + } + + public static explicit operator ResponseResult(BinaryData data) + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeResponseResult(document.RootElement, data, ModelSerializationExtensions.WireOptions); + } + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValue value) + { + ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); + value = default; + + if (local.StartsWith("reasoning"u8)) + { + return Reasoning.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); + } + if (local.StartsWith("text"u8)) + { + return Text.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("text"u8.Length)], out value); + } + if (local.StartsWith("error"u8)) + { + return Error.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("error"u8.Length)], out value); + } + if (local.StartsWith("incomplete_details"u8)) + { + return IncompleteDetails.Patch.TryGetEncodedValue([.. "$"u8, .. 
local.Slice("incomplete_details"u8.Length)], out value); + } + if (local.StartsWith("usage"u8)) + { + return Usage.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("usage"u8.Length)], out value); + } + if (local.StartsWith("tools"u8)) + { + int propertyLength = "tools"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + return Tools[index].Patch.TryGetEncodedValue([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], out value); + } + if (local.StartsWith("output"u8)) + { + int propertyLength = "output"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + return Output[index].Patch.TryGetEncodedValue([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], out value); + } + return false; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue value) + { + ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); + + if (local.StartsWith("reasoning"u8)) + { + Reasoning.Patch.Set([.. "$"u8, .. local.Slice("reasoning"u8.Length)], value); + return true; + } + if (local.StartsWith("text"u8)) + { + Text.Patch.Set([.. "$"u8, .. local.Slice("text"u8.Length)], value); + return true; + } + if (local.StartsWith("error"u8)) + { + Error.Patch.Set([.. "$"u8, .. local.Slice("error"u8.Length)], value); + return true; + } + if (local.StartsWith("incomplete_details"u8)) + { + IncompleteDetails.Patch.Set([.. "$"u8, .. local.Slice("incomplete_details"u8.Length)], value); + return true; + } + if (local.StartsWith("usage"u8)) + { + Usage.Patch.Set([.. "$"u8, .. 
local.Slice("usage"u8.Length)], value); + return true; + } + if (local.StartsWith("tools"u8)) + { + int propertyLength = "tools"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + Tools[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); + return true; + } + if (local.StartsWith("output"u8)) + { + int propertyLength = "output"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + Output[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); + return true; + } + return false; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + } +} diff --git a/src/Custom/Responses/ResponseResult.cs b/src/Custom/Responses/ResponseResult.cs new file mode 100644 index 000000000..85c694352 --- /dev/null +++ b/src/Custom/Responses/ResponseResult.cs @@ -0,0 +1,120 @@ +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class ResponseResult + { + [Experimental("SCME0001")] + private JsonPatch _patch; + + internal ResponseResult(IDictionary metadata, float? temperature, float? topP, string user, string id, DateTimeOffset createdAt, ResponseError error, ResponseIncompleteStatusDetails incompleteDetails, IEnumerable output, bool parallelToolCalls) + { + // Plugin customization: ensure initialization of collections + Metadata = metadata ?? 
new ChangeTrackingDictionary(); + Temperature = temperature; + TopP = topP; + User = user; + Tools = new ChangeTrackingList(); + Id = id; + CreatedAt = createdAt; + Error = error; + IncompleteDetails = incompleteDetails; + Output = output.ToList(); + ParallelToolCalls = parallelToolCalls; + } + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + internal ResponseResult(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, InternalModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, BinaryData toolChoice, ResponseTruncationMode? truncation, string id, string @object, ResponseStatus? status, DateTimeOffset createdAt, ResponseError error, ResponseIncompleteStatusDetails incompleteDetails, IList output, string outputText, ResponseTokenUsage usage, bool parallelToolCalls, in JsonPatch patch) + { + // Plugin customization: ensure initialization of collections + Metadata = metadata ?? new ChangeTrackingDictionary(); + Temperature = temperature; + TopP = topP; + User = user; + ServiceTier = serviceTier; + PreviousResponseId = previousResponseId; + Model = model; + Reasoning = reasoning; + Background = background; + MaxOutputTokens = maxOutputTokens; + Instructions = instructions; + Text = text; + Tools = tools ?? new ChangeTrackingList(); + ToolChoice = toolChoice; + Truncation = truncation; + Id = id; + Object = @object; + Status = status; + CreatedAt = createdAt; + Error = error; + IncompleteDetails = incompleteDetails; + Output = output ?? 
new ChangeTrackingList(); + OutputText = outputText; + Usage = usage; + ParallelToolCalls = parallelToolCalls; + _patch = patch; + _patch.SetPropagators(PropagateSet, PropagateGet); + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + [EditorBrowsable(EditorBrowsableState.Never)] + [Experimental("SCME0001")] + public ref JsonPatch Patch => ref _patch; + + public IDictionary Metadata { get; } + + public float? Temperature { get; } + + public float? TopP { get; } + + public string User { get; } + + public ResponseServiceTier? ServiceTier { get; } + + public string PreviousResponseId { get; } + + internal InternalModelIdsResponses? Model { get; } + + public ResponseReasoningOptions Reasoning { get; } + + public bool? Background { get; } + + public int? MaxOutputTokens { get; } + + public string Instructions { get; } + + public ResponseTextOptions Text { get; } + + public IList Tools { get; } + + public BinaryData ToolChoice { get; } + + public ResponseTruncationMode? Truncation { get; } + + public string Id { get; } + + public string Object { get; } = "ResponseResult"; + + public ResponseStatus? 
Status { get; } + + public DateTimeOffset CreatedAt { get; } + + public ResponseError Error { get; } + + public ResponseIncompleteStatusDetails IncompleteDetails { get; } + + public IList Output { get; } + + public string OutputText { get; } + + public ResponseTokenUsage Usage { get; } + + public bool ParallelToolCalls { get; } + } +} diff --git a/src/Generated/OpenAIResponseClient.RestClient.cs b/src/Generated/OpenAIResponseClient.RestClient.cs index fd253274d..22b300298 100644 --- a/src/Generated/OpenAIResponseClient.RestClient.cs +++ b/src/Generated/OpenAIResponseClient.RestClient.cs @@ -4,7 +4,6 @@ using System.ClientModel; using System.ClientModel.Primitives; -using System.Collections.Generic; using OpenAI; namespace OpenAI.Responses @@ -29,34 +28,6 @@ internal virtual PipelineMessage CreateCreateResponseRequest(BinaryContent conte return message; } - internal virtual PipelineMessage CreateGetResponseRequest(string responseId, IEnumerable includables, bool? stream, int? startingAfter, RequestOptions options) - { - ClientUriBuilder uri = new ClientUriBuilder(); - uri.Reset(_endpoint); - uri.AppendPath("/responses/", false); - uri.AppendPath(responseId, true); - if (includables != null && !(includables is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) - { - foreach (var @param in includables) - { - uri.AppendQuery("include[]", @param.ToString(), true); - } - } - if (stream != null) - { - uri.AppendQuery("stream", TypeFormatters.ConvertToString(stream), true); - } - if (startingAfter != null) - { - uri.AppendQuery("starting_after", TypeFormatters.ConvertToString(startingAfter), true); - } - PipelineMessage message = Pipeline.CreateMessage(uri.ToUri(), "GET", PipelineMessageClassifier200); - PipelineRequest request = message.Request; - request.Headers.Set("Accept", "application/json, text/event-stream"); - message.Apply(options); - return message; - } - internal virtual PipelineMessage CreateDeleteResponseRequest(string responseId, 
RequestOptions options) { ClientUriBuilder uri = new ClientUriBuilder(); From 9157407d9c928a6fa225e0a1cacf639cd2e050d5 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Thu, 30 Oct 2025 17:15:43 -0500 Subject: [PATCH 02/15] CreateResponseStreaming --- src/Custom/Responses/OpenAIResponseClient.cs | 30 ++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/Custom/Responses/OpenAIResponseClient.cs b/src/Custom/Responses/OpenAIResponseClient.cs index 49c98d662..4b120f03e 100644 --- a/src/Custom/Responses/OpenAIResponseClient.cs +++ b/src/Custom/Responses/OpenAIResponseClient.cs @@ -213,6 +213,26 @@ internal AsyncCollectionResult CreateResponseStreamingA requestOptions.CancellationToken); } + public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default) + { + return CreateResponseStreamingAsync(options, cancellationToken.ToRequestOptions(streaming: true)); + } + + internal AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, RequestOptions requestOptions) + { + Argument.AssertNotNull(options, nameof(options)); + Argument.AssertNotNull(requestOptions, nameof(requestOptions)); + if (requestOptions.BufferResponse is true) + { + throw new InvalidOperationException("'requestOptions.BufferResponse' must be 'false' when calling 'CreateResponseStreamingAsync'."); + } + + return new AsyncSseUpdateCollection( + async () => await CreateResponseAsync(options, requestOptions).ConfigureAwait(false), + StreamingResponseUpdate.DeserializeStreamingResponseUpdate, + requestOptions.CancellationToken); + } + public virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(inputItems, nameof(inputItems)); @@ -224,6 +244,16 @@ public virtual CollectionResult CreateResponseStreaming cancellationToken); } + public virtual 
CollectionResult CreateResponseStreaming(CreateResponseOptions options, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + + return new SseUpdateCollection( + () => CreateResponse(options, cancellationToken.ToRequestOptions(streaming: true)), + StreamingResponseUpdate.DeserializeStreamingResponseUpdate, + cancellationToken); + } + public virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); From b91fb1d8a20170c08e1644c7439ed0e73c731655 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Thu, 30 Oct 2025 17:58:51 -0500 Subject: [PATCH 03/15] GetResponse --- src/Custom/Responses/GetResponseOptions.cs | 17 ++++++ src/Custom/Responses/OpenAIResponseClient.cs | 57 +++++++++++++++++--- 2 files changed, 68 insertions(+), 6 deletions(-) create mode 100644 src/Custom/Responses/GetResponseOptions.cs diff --git a/src/Custom/Responses/GetResponseOptions.cs b/src/Custom/Responses/GetResponseOptions.cs new file mode 100644 index 000000000..63bec2623 --- /dev/null +++ b/src/Custom/Responses/GetResponseOptions.cs @@ -0,0 +1,17 @@ + +namespace OpenAI.Responses +{ + public class GetResponseOptions + { + public GetResponseOptions(string responseId) + { + ResponseId = responseId; + } + + public string ResponseId { get; set; } + + public int? 
StartingAfter { get; set; } + + public bool Stream { get; set; } + } +} \ No newline at end of file diff --git a/src/Custom/Responses/OpenAIResponseClient.cs b/src/Custom/Responses/OpenAIResponseClient.cs index 4b120f03e..41728ff88 100644 --- a/src/Custom/Responses/OpenAIResponseClient.cs +++ b/src/Custom/Responses/OpenAIResponseClient.cs @@ -176,19 +176,19 @@ public virtual ClientResult CreateResponse(string userInputText, cancellationToken); } - public virtual ClientResult CreateResponse(CreateResponseOptions requestBody, CancellationToken cancellationToken = default) + public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(requestBody, nameof(requestBody)); + Argument.AssertNotNull(options, nameof(options)); - ClientResult result = this.CreateResponse(requestBody, cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null); + ClientResult result = this.CreateResponse(options, cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null); return ClientResult.FromValue((ResponseResult)result.GetRawResponse().Content, result.GetRawResponse()); } - public virtual async Task> CreateResponseAsync(CreateResponseOptions requestBody, CancellationToken cancellationToken = default) + public virtual async Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(requestBody, nameof(requestBody)); + Argument.AssertNotNull(options, nameof(options)); - ClientResult result = await this.CreateResponseAsync(requestBody, cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null).ConfigureAwait(false); + ClientResult result = await this.CreateResponseAsync(options, cancellationToken.CanBeCanceled ? 
new RequestOptions { CancellationToken = cancellationToken } : null).ConfigureAwait(false);
            return ClientResult.FromValue((ResponseResult)result.GetRawResponse().Content, result.GetRawResponse());
        }

        /// <summary>
        /// Gets a previously created model response as a buffered <see cref="ResponseResult"/>.
        /// </summary>
        /// <param name="options"> The options identifying the response to retrieve. Must not be null and must carry a non-empty ResponseId. </param>
        /// <param name="cancellationToken"> Token to cancel the operation. </param>
        /// <returns> A <see cref="ClientResult{T}"/> wrapping the deserialized response. </returns>
        /// <exception cref="ArgumentNullException"> <paramref name="options"/> or its ResponseId is null. </exception>
        // `virtual` added for consistency with the synchronous GetResponse(GetResponseOptions) overload
        // and the rest of the public client surface (all other public methods are virtual to enable mocking).
        public virtual async Task<ClientResult<ResponseResult>> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default)
        {
            Argument.AssertNotNull(options, nameof(options));
            Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId));

            // NOTE(review): options.Stream is forwarded as-is, but an SSE (streamed) payload cannot be
            // materialized into a single ResponseResult — confirm callers wanting streaming use the
            // GetResponseStreaming* overloads instead.
            ClientResult protocolResult = await GetResponseAsync(options.ResponseId, stream: options.Stream, startingAfter: options.StartingAfter, cancellationToken.ToRequestOptions()).ConfigureAwait(false);
            return ClientResult.FromValue((ResponseResult)protocolResult, protocolResult.GetRawResponse());
        }

        /// <summary>
        /// Synchronous counterpart of <see cref="GetResponseAsync(GetResponseOptions, CancellationToken)"/>.
        /// </summary>
        public virtual ClientResult<ResponseResult> GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default)
        {
            Argument.AssertNotNull(options, nameof(options));
            Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId));

            ClientResult protocolResult = GetResponse(options.ResponseId, stream: options.Stream, startingAfter: options.StartingAfter, cancellationToken.ToRequestOptions());
            return ClientResult.FromValue((ResponseResult)protocolResult, protocolResult.GetRawResponse());
        }

        public virtual AsyncCollectionResult<StreamingResponseUpdate> GetResponseStreamingAsync(string responseId, int?
startingAfter = null, CancellationToken cancellationToken = default) { return GetResponseStreamingAsync(responseId, cancellationToken.ToRequestOptions(streaming: true), startingAfter); @@ -322,6 +340,33 @@ internal AsyncCollectionResult GetResponseStreamingAsyn requestOptions.CancellationToken); } + public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); + + return new SseUpdateCollection( + () => GetResponse(options.ResponseId, stream: true, startingAfter: options.StartingAfter, cancellationToken.ToRequestOptions(streaming: true)), + StreamingResponseUpdate.DeserializeStreamingResponseUpdate, + cancellationToken); + } + + public AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); + + if (options.Stream is false) + { + throw new InvalidOperationException("'options.Stream' must be 'true' when calling 'GetResponseStreamingAsync'."); + } + + return new AsyncSseUpdateCollection( + async () => await GetResponseAsync(options.ResponseId, options.Stream, startingAfter: options.StartingAfter, cancellationToken.ToRequestOptions(streaming: true)).ConfigureAwait(false), + StreamingResponseUpdate.DeserializeStreamingResponseUpdate, + cancellationToken); + } + public virtual CollectionResult GetResponseStreaming(string responseId, int? 
startingAfter = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(responseId, nameof(responseId)); From 72a14c3a4837819eebffd6ce541102cd73bf0a88 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Mon, 3 Nov 2025 15:59:56 -0600 Subject: [PATCH 04/15] GetResponseInputItems --- .../Responses/GetResponseInputItemsOptions.cs | 20 ++ src/Custom/Responses/OpenAIResponseClient.cs | 20 ++ .../ResponseItemList.Serialization.cs | 238 ++++++++++++++++++ src/Custom/Responses/ResponseItemList.cs | 51 ++++ 4 files changed, 329 insertions(+) create mode 100644 src/Custom/Responses/GetResponseInputItemsOptions.cs create mode 100644 src/Custom/Responses/ResponseItemList.Serialization.cs create mode 100644 src/Custom/Responses/ResponseItemList.cs diff --git a/src/Custom/Responses/GetResponseInputItemsOptions.cs b/src/Custom/Responses/GetResponseInputItemsOptions.cs new file mode 100644 index 000000000..ed54c18ae --- /dev/null +++ b/src/Custom/Responses/GetResponseInputItemsOptions.cs @@ -0,0 +1,20 @@ +namespace OpenAI.Responses; + +public struct GetResponseInputItemsOptions +{ + public GetResponseInputItemsOptions(string responseId) + { + ResponseId = responseId; + } + + public string ResponseId { get; set; } + + public string After { get; set; } + + public string Before { get; set; } + + public int? 
Limit { get; set; } + + public string Order { get; set; } + +} \ No newline at end of file diff --git a/src/Custom/Responses/OpenAIResponseClient.cs b/src/Custom/Responses/OpenAIResponseClient.cs index 41728ff88..8834b879e 100644 --- a/src/Custom/Responses/OpenAIResponseClient.cs +++ b/src/Custom/Responses/OpenAIResponseClient.cs @@ -411,6 +411,26 @@ public virtual ClientResult CancelResponse(string responseId, Ca return ClientResult.FromValue(convenienceResult, protocolResult.GetRawResponse()); } + public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); + + PipelineMessage message = CreateGetResponseInputItemsRequest(options.ResponseId, options.Limit, options.After, options.Order, options.Before, cancellationToken.ToRequestOptions()); + ClientResult result = ClientResult.FromResponse(Pipeline.ProcessMessage(message, cancellationToken.ToRequestOptions())); + return ClientResult.FromValue((ResponseItemList)result, result.GetRawResponse()); + } + + public virtual async Task GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); + + PipelineMessage message = CreateGetResponseInputItemsRequest(options.ResponseId, options.Limit, options.After, options.Order, options.Before, cancellationToken.ToRequestOptions()); + ClientResult result = ClientResult.FromResponse(await Pipeline.ProcessMessageAsync(message, cancellationToken.ToRequestOptions()).ConfigureAwait(false)); + return ClientResult.FromValue((ResponseItemList)result, result.GetRawResponse()); + } + internal virtual ResponseCreationOptions CreatePerCallOptions(ResponseCreationOptions 
userOptions, IEnumerable inputItems, bool stream = false) { ResponseCreationOptions copiedOptions = userOptions is null diff --git a/src/Custom/Responses/ResponseItemList.Serialization.cs b/src/Custom/Responses/ResponseItemList.Serialization.cs new file mode 100644 index 000000000..cbf658b88 --- /dev/null +++ b/src/Custom/Responses/ResponseItemList.Serialization.cs @@ -0,0 +1,238 @@ +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; + +namespace OpenAI.Responses +{ + public partial class ResponseItemList : IJsonModel + { + internal ResponseItemList() : this(null, null, default, null, null, default) + { + } + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + if (Patch.Contains("$"u8)) + { + writer.WriteRawValue(Patch.GetJson("$"u8)); + return; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ResponseItemList)} does not support writing '{format}' format."); + } +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
+ if (!Patch.Contains("$.object"u8)) + { + writer.WritePropertyName("object"u8); + writer.WriteStringValue(Object); + } + if (Patch.Contains("$.data"u8)) + { + if (!Patch.IsRemoved("$.data"u8)) + { + writer.WritePropertyName("data"u8); + writer.WriteRawValue(Patch.GetJson("$.data"u8)); + } + } + else + { + writer.WritePropertyName("data"u8); + writer.WriteStartArray(); + for (int i = 0; i < Data.Count; i++) + { + if (Data[i].Patch.IsRemoved("$"u8)) + { + continue; + } + writer.WriteObjectValue(Data[i], options); + } + Patch.WriteTo(writer, "$.data"u8); + writer.WriteEndArray(); + } + if (!Patch.Contains("$.has_more"u8)) + { + writer.WritePropertyName("has_more"u8); + writer.WriteBooleanValue(HasMore); + } + if (!Patch.Contains("$.first_id"u8)) + { + writer.WritePropertyName("first_id"u8); + writer.WriteStringValue(FirstId); + } + if (!Patch.Contains("$.last_id"u8)) + { + writer.WritePropertyName("last_id"u8); + writer.WriteStringValue(LastId); + } + + Patch.WriteTo(writer); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + } + + ResponseItemList IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + protected virtual ResponseItemList JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ResponseItemList)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeResponseItemList(document.RootElement, null, options); + } + + internal static ResponseItemList DeserializeResponseItemList(JsonElement element, BinaryData data, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string @object = default; + IList data0 = default; + bool hasMore = default; + string firstId = default; + string lastId = default; +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("object"u8)) + { + @object = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("data"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ResponseItem.DeserializeResponseItem(item, item.GetUtf8Bytes(), options)); + } + data0 = array; + continue; + } + if (prop.NameEquals("has_more"u8)) + { + hasMore = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("first_id"u8)) + { + firstId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("last_id"u8)) + { + lastId = prop.Value.GetString(); + continue; + } + patch.Set([.. "$."u8, .. 
Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); + } + return new ResponseItemList( + @object, + data0, + hasMore, + firstId, + lastId, + patch); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, OpenAIContext.Default); + default: + throw new FormatException($"The model {nameof(ResponseItemList)} does not support writing '{options.Format}' format."); + } + } + + ResponseItemList IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + protected virtual ResponseItemList PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data)) + { + return DeserializeResponseItemList(document.RootElement, data, options); + } + default: + throw new FormatException($"The model {nameof(ResponseItemList)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + public static explicit operator ResponseItemList(ClientResult result) + { + using PipelineResponse response = result.GetRawResponse(); + BinaryData data = response.Content; + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeResponseItemList(document.RootElement, data, ModelSerializationExtensions.WireOptions); + } + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValue value) + { + ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); + value = default; + + if (local.StartsWith("data"u8)) + { + int propertyLength = "data"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + return Data[index].Patch.TryGetEncodedValue([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], out value); + } + return false; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
+ private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue value) + { + ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); + + if (local.StartsWith("data"u8)) + { + int propertyLength = "data"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + Data[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); + return true; + } + return false; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + } +} diff --git a/src/Custom/Responses/ResponseItemList.cs b/src/Custom/Responses/ResponseItemList.cs new file mode 100644 index 000000000..ea1473162 --- /dev/null +++ b/src/Custom/Responses/ResponseItemList.cs @@ -0,0 +1,51 @@ +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class ResponseItemList + { + [Experimental("SCME0001")] + private JsonPatch _patch; + + internal ResponseItemList(IEnumerable data, bool hasMore, string firstId, string lastId) + { + Data = data.ToList(); + HasMore = hasMore; + FirstId = firstId; + LastId = lastId; + } + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + internal ResponseItemList(string @object, IList data, bool hasMore, string firstId, string lastId, in JsonPatch patch) + { + // Plugin customization: ensure initialization of collections + Object = @object; + Data = data ?? 
new ChangeTrackingList(); + HasMore = hasMore; + FirstId = firstId; + LastId = lastId; + _patch = patch; + _patch.SetPropagators(PropagateSet, PropagateGet); + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + [EditorBrowsable(EditorBrowsableState.Never)] + [Experimental("SCME0001")] + public ref JsonPatch Patch => ref _patch; + + public string Object { get; } = "list"; + + public IList Data { get; } + + public bool HasMore { get; } + + public string FirstId { get; } + + public string LastId { get; } + } +} From 994700f2f6af6fbd978c763e0fc4ef4ec0c56784 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Thu, 6 Nov 2025 10:45:46 -0600 Subject: [PATCH 05/15] export --- api/OpenAI.net8.0.cs | 116 ++++++++++++++++++ api/OpenAI.netstandard2.0.cs | 109 ++++++++++++++++ .../OpenAIResponseClient.Protocol.cs | 1 - 3 files changed, 225 insertions(+), 1 deletion(-) diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs index 7ac79dd5a..319d415c3 100644 --- a/api/OpenAI.net8.0.cs +++ b/api/OpenAI.net8.0.cs @@ -5053,6 +5053,38 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } [Experimental("OPENAI001")] + public class CreateResponseOptions : IJsonModel, IPersistableModel { + public CreateResponseOptions(List input); + public bool? Background { get; set; } + public IList Include { get; set; } + public IList Input { get; } + public string Instructions { get; set; } + public int? MaxOutputTokens { get; set; } + public IDictionary Metadata { get; } + public bool? ParallelToolCalls { get; set; } + [EditorBrowsable(EditorBrowsableState.Never)] + [Experimental("SCME0001")] + public ref JsonPatch Patch { get; } + public string PreviousResponseId { get; set; } + public ResponseReasoningOptions Reasoning { get; set; } + public ResponseServiceTier? 
ServiceTier { get; set; } + public bool? Store { get; set; } + public bool? Stream { get; set; } + public float? Temperature { get; set; } + public ResponseTextOptions Text { get; set; } + public ResponseToolChoice ToolChoice { get; set; } + public IList Tools { get; } + public float? TopP { get; set; } + public ResponseTruncationMode? Truncation { get; set; } + public string User { get; set; } + public static CreateResponseOptions Create(IEnumerable inputItems, OpenAIResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false); + protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); + public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions); + protected virtual CreateResponseOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); + } + [Experimental("OPENAI001")] public class CustomMcpToolCallApprovalPolicy : IJsonModel, IPersistableModel { [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] @@ -5205,6 +5237,20 @@ public class FunctionTool : ResponseTool, IJsonModel, IPersistable protected override ResponseTool PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } + public partial struct GetResponseInputItemsOptions { + public GetResponseInputItemsOptions(string responseId); + public string After { get; set; } + public string Before { get; set; } + public int? 
Limit { get; set; } + public string Order { get; set; } + public string ResponseId { get; set; } + } + public class GetResponseOptions { + public GetResponseOptions(string responseId); + public string ResponseId { get; set; } + public int? StartingAfter { get; set; } + public bool Stream { get; set; } + } [Experimental("OPENAI001")] public readonly partial struct GlobalMcpToolCallApprovalPolicy : IEquatable { public GlobalMcpToolCallApprovalPolicy(string value); @@ -5372,6 +5418,14 @@ public class ImageGenerationToolInputImageMask : IJsonModel, IPersistableModel { public McpTool(string serverLabel, McpToolConnectorId connectorId); public McpTool(string serverLabel, Uri serverUri); @@ -5574,29 +5628,39 @@ public class OpenAIResponseClient { public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default); public virtual Task CancelResponseAsync(string responseId, RequestOptions options); public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); public virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); + public virtual Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); public virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual Task> CreateResponseAsync(string 
userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); + public virtual CollectionResult CreateResponseStreaming(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); public virtual ClientResult GetResponse(string responseId, CancellationToken cancellationToken = default); + public Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual Task GetResponseAsync(string responseId, bool? stream, int? 
startingAfter, RequestOptions options); public virtual Task> GetResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, int? limit, string order, string after, string before, RequestOptions options); + public virtual Task GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); + public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseStreaming(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default); + public AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, int? 
startingAfter = null, CancellationToken cancellationToken = default); } [Experimental("OPENAI001")] @@ -5895,6 +5959,22 @@ public class ResponseItemCollectionOptions : IJsonModel, IPersistableModel { + public IList Data { get; } + public string FirstId { get; } + public bool HasMore { get; } + public string LastId { get; } + public string Object { get; } + [EditorBrowsable(EditorBrowsableState.Never)] + [Experimental("SCME0001")] + public ref JsonPatch Patch { get; } + protected virtual ResponseItemList JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); + public static explicit operator ResponseItemList(ClientResult result); + protected virtual ResponseItemList PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); + } + [Experimental("OPENAI001")] public class ResponseMessageAnnotation : IJsonModel, IPersistableModel { public ResponseMessageAnnotationKind Kind { get; } [EditorBrowsable(EditorBrowsableState.Never)] @@ -5971,6 +6051,42 @@ public class ResponseReasoningOptions : IJsonModel, IP public override readonly string ToString(); } [Experimental("OPENAI001")] + public class ResponseResult : IJsonModel, IPersistableModel { + public bool? Background { get; } + public DateTimeOffset CreatedAt { get; } + public ResponseError Error { get; } + public string Id { get; } + public ResponseIncompleteStatusDetails IncompleteDetails { get; } + public string Instructions { get; } + public int? 
MaxOutputTokens { get; } + public IDictionary Metadata { get; } + public string Object { get; } + public IList Output { get; } + public string OutputText { get; } + public bool ParallelToolCalls { get; } + [EditorBrowsable(EditorBrowsableState.Never)] + [Experimental("SCME0001")] + public ref JsonPatch Patch { get; } + public string PreviousResponseId { get; } + public ResponseReasoningOptions Reasoning { get; } + public ResponseServiceTier? ServiceTier { get; } + public ResponseStatus? Status { get; } + public float? Temperature { get; } + public ResponseTextOptions Text { get; } + public BinaryData ToolChoice { get; } + public IList Tools { get; } + public float? TopP { get; } + public ResponseTruncationMode? Truncation { get; } + public ResponseTokenUsage Usage { get; } + public string User { get; } + protected virtual ResponseResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); + public static explicit operator ResponseResult(BinaryData data); + public static explicit operator ResponseResult(ClientResult result); + protected virtual ResponseResult PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); + } + [Experimental("OPENAI001")] public readonly partial struct ResponseServiceTier : IEquatable { public ResponseServiceTier(string value); public static ResponseServiceTier Auto { get; } diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs index 64db26dd3..dac09b81d 100644 --- a/api/OpenAI.netstandard2.0.cs +++ b/api/OpenAI.netstandard2.0.cs @@ -4415,6 +4415,36 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, protected override ResponseMessageAnnotation PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); protected override 
BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } + public class CreateResponseOptions : IJsonModel, IPersistableModel { + public CreateResponseOptions(List input); + public bool? Background { get; set; } + public IList Include { get; set; } + public IList Input { get; } + public string Instructions { get; set; } + public int? MaxOutputTokens { get; set; } + public IDictionary Metadata { get; } + public bool? ParallelToolCalls { get; set; } + [EditorBrowsable(EditorBrowsableState.Never)] + public ref JsonPatch Patch { get; } + public string PreviousResponseId { get; set; } + public ResponseReasoningOptions Reasoning { get; set; } + public ResponseServiceTier? ServiceTier { get; set; } + public bool? Store { get; set; } + public bool? Stream { get; set; } + public float? Temperature { get; set; } + public ResponseTextOptions Text { get; set; } + public ResponseToolChoice ToolChoice { get; set; } + public IList Tools { get; } + public float? TopP { get; set; } + public ResponseTruncationMode? 
Truncation { get; set; } + public string User { get; set; } + public static CreateResponseOptions Create(IEnumerable inputItems, OpenAIResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false); + protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); + public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions); + protected virtual CreateResponseOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); + } public class CustomMcpToolCallApprovalPolicy : IJsonModel, IPersistableModel { [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } @@ -4551,6 +4581,20 @@ public class FunctionTool : ResponseTool, IJsonModel, IPersistable protected override ResponseTool PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } + public partial struct GetResponseInputItemsOptions { + public GetResponseInputItemsOptions(string responseId); + public string After { get; set; } + public string Before { get; set; } + public int? Limit { get; set; } + public string Order { get; set; } + public string ResponseId { get; set; } + } + public class GetResponseOptions { + public GetResponseOptions(string responseId); + public string ResponseId { get; set; } + public int? 
StartingAfter { get; set; } + public bool Stream { get; set; } + } public readonly partial struct GlobalMcpToolCallApprovalPolicy : IEquatable { public GlobalMcpToolCallApprovalPolicy(string value); public static GlobalMcpToolCallApprovalPolicy AlwaysRequireApproval { get; } @@ -4705,6 +4749,13 @@ public class ImageGenerationToolInputImageMask : IJsonModel, IPersistableModel { public McpTool(string serverLabel, McpToolConnectorId connectorId); public McpTool(string serverLabel, Uri serverUri); @@ -4888,29 +4939,39 @@ public class OpenAIResponseClient { public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default); public virtual Task CancelResponseAsync(string responseId, RequestOptions options); public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); public virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); + public virtual Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); public virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual Task> CreateResponseAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); + public virtual CollectionResult CreateResponseStreaming(CreateResponseOptions 
options, CancellationToken cancellationToken = default); public virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); public virtual ClientResult GetResponse(string responseId, CancellationToken cancellationToken = default); + public Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual Task GetResponseAsync(string responseId, bool? stream, int? 
startingAfter, RequestOptions options); public virtual Task> GetResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, int? limit, string order, string after, string before, RequestOptions options); + public virtual Task GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); + public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseStreaming(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default); + public AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, int? 
startingAfter = null, CancellationToken cancellationToken = default); } public static class OpenAIResponsesModelFactory { @@ -5178,6 +5239,20 @@ public class ResponseItemCollectionOptions : IJsonModel, IPersistableModel { + public IList Data { get; } + public string FirstId { get; } + public bool HasMore { get; } + public string LastId { get; } + public string Object { get; } + [EditorBrowsable(EditorBrowsableState.Never)] + public ref JsonPatch Patch { get; } + protected virtual ResponseItemList JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); + public static explicit operator ResponseItemList(ClientResult result); + protected virtual ResponseItemList PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); + } public class ResponseMessageAnnotation : IJsonModel, IPersistableModel { public ResponseMessageAnnotationKind Kind { get; } [EditorBrowsable(EditorBrowsableState.Never)] @@ -5245,6 +5320,40 @@ public class ResponseReasoningOptions : IJsonModel, IP public static bool operator !=(ResponseReasoningSummaryVerbosity left, ResponseReasoningSummaryVerbosity right); public override readonly string ToString(); } + public class ResponseResult : IJsonModel, IPersistableModel { + public bool? Background { get; } + public DateTimeOffset CreatedAt { get; } + public ResponseError Error { get; } + public string Id { get; } + public ResponseIncompleteStatusDetails IncompleteDetails { get; } + public string Instructions { get; } + public int? 
MaxOutputTokens { get; } + public IDictionary Metadata { get; } + public string Object { get; } + public IList Output { get; } + public string OutputText { get; } + public bool ParallelToolCalls { get; } + [EditorBrowsable(EditorBrowsableState.Never)] + public ref JsonPatch Patch { get; } + public string PreviousResponseId { get; } + public ResponseReasoningOptions Reasoning { get; } + public ResponseServiceTier? ServiceTier { get; } + public ResponseStatus? Status { get; } + public float? Temperature { get; } + public ResponseTextOptions Text { get; } + public BinaryData ToolChoice { get; } + public IList Tools { get; } + public float? TopP { get; } + public ResponseTruncationMode? Truncation { get; } + public ResponseTokenUsage Usage { get; } + public string User { get; } + protected virtual ResponseResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); + public static explicit operator ResponseResult(BinaryData data); + public static explicit operator ResponseResult(ClientResult result); + protected virtual ResponseResult PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); + } public readonly partial struct ResponseServiceTier : IEquatable { public ResponseServiceTier(string value); public static ResponseServiceTier Auto { get; } diff --git a/src/Custom/Responses/OpenAIResponseClient.Protocol.cs b/src/Custom/Responses/OpenAIResponseClient.Protocol.cs index 3ff7e5340..fbc0684a9 100644 --- a/src/Custom/Responses/OpenAIResponseClient.Protocol.cs +++ b/src/Custom/Responses/OpenAIResponseClient.Protocol.cs @@ -1,7 +1,6 @@ using System.ClientModel; using System.ClientModel.Primitives; using System.Collections.Generic; -using System.ComponentModel; using System.Threading.Tasks; namespace OpenAI.Responses; From 
09b98ef31c5b179b8e1bcb724e870dda1d17e40c Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Mon, 10 Nov 2025 09:25:29 -0600 Subject: [PATCH 06/15] wip --- .../Responses/Example01_SimpleResponse.cs | 4 +- .../Example01_SimpleResponseAsync.cs | 4 +- .../Example02_SimpleResponseStreaming.cs | 2 +- .../Example02_SimpleResponseStreamingAsync.cs | 2 +- .../Responses/Example03_FunctionCalling.cs | 10 +- .../Example03_FunctionCallingAsync.cs | 10 +- .../Example04_FunctionCallingStreaming.cs | 4 +- ...Example04_FunctionCallingStreamingAsync.cs | 4 +- examples/Responses/Example05_RemoteMcp.cs | 8 +- .../Responses/Example05_RemoteMcpAsync.cs | 8 +- .../Example06_RemoteMcpAuthentication.cs | 7 +- .../Example06_RemoteMcpAuthenticationAsync.cs | 8 +- .../Example07_InputAdditionalProperties.cs | 10 +- ...xample07_InputAdditionalPropertiesAsync.cs | 10 +- .../Example08_OutputAdditionalProperties.cs | 8 +- ...ample08_OutputAdditionalPropertiesAsync.cs | 8 +- .../Example09_ModelOverridePerRequest.cs | 10 +- .../Example09_ModelOverridePerRequestAsync.cs | 10 +- .../Responses/Example10_CodeInterpreter.cs | 15 +- .../Example10_CodeInterpreterAsync.cs | 13 +- src/Custom/Responses/CreateResponseOptions.cs | 2 +- .../Responses/Internal/GeneratorStubs.cs | 2 +- src/Custom/Responses/OpenAIResponse.cs | 6 +- src/Custom/Responses/OpenAIResponseClient.cs | 38 +-- .../Responses/OpenAIResponsesModelFactory.cs | 2 +- .../Responses/ResponseCreationOptions.cs | 4 +- .../Responses/ResponseResult.Serialization.cs | 17 +- src/Custom/Responses/ResponseResult.cs | 10 +- ...ngResponseCompletedUpdate.Serialization.cs | 4 +- .../StreamingResponseCompletedUpdate.cs | 10 +- ...mingResponseCreatedUpdate.Serialization.cs | 4 +- .../StreamingResponseCreatedUpdate.cs | 6 +- ...amingResponseFailedUpdate.Serialization.cs | 8 +- .../StreamingResponseFailedUpdate.cs | 10 +- ...gResponseInProgressUpdate.Serialization.cs | 8 +- .../StreamingResponseInProgressUpdate.cs | 10 +- 
...gResponseIncompleteUpdate.Serialization.cs | 8 +- .../StreamingResponseIncompleteUpdate.cs | 10 +- ...amingResponseQueuedUpdate.Serialization.cs | 8 +- .../StreamingResponseQueuedUpdate.cs | 10 +- .../Responses/OpenAIResponse.Serialization.cs | 2 +- .../Models/Responses/OpenAIResponse.cs | 2 +- .../ResponseCreationOptions.Serialization.cs | 2 +- .../Responses/ResponseCreationOptions.cs | 2 +- .../OpenAIResponsesModelFactoryTests.cs | 50 +-- tests/Responses/ResponseStoreTests.cs | 24 +- tests/Responses/ResponsesTests.cs | 318 +++++++++--------- tests/Responses/ResponsesToolTests.cs | 141 ++++---- 48 files changed, 411 insertions(+), 462 deletions(-) rename src/{Generated/Models => Custom}/Responses/StreamingResponseCompletedUpdate.Serialization.cs (97%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseCompletedUpdate.cs (79%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseCreatedUpdate.Serialization.cs (97%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseCreatedUpdate.cs (77%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseFailedUpdate.Serialization.cs (97%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseFailedUpdate.cs (73%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseInProgressUpdate.Serialization.cs (97%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseInProgressUpdate.cs (79%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseIncompleteUpdate.Serialization.cs (97%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseIncompleteUpdate.cs (79%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseQueuedUpdate.Serialization.cs (97%) rename src/{Generated/Models => Custom}/Responses/StreamingResponseQueuedUpdate.cs (73%) diff --git a/examples/Responses/Example01_SimpleResponse.cs b/examples/Responses/Example01_SimpleResponse.cs index e7176bf00..03d72e94f 100644 --- 
a/examples/Responses/Example01_SimpleResponse.cs +++ b/examples/Responses/Example01_SimpleResponse.cs @@ -15,9 +15,9 @@ public void Example01_SimpleResponse() { OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - OpenAIResponse response = client.CreateResponse("Say 'this is a test.'"); + ResponseResult response = client.CreateResponse(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); } } diff --git a/examples/Responses/Example01_SimpleResponseAsync.cs b/examples/Responses/Example01_SimpleResponseAsync.cs index 9d76e919b..5ef3532a6 100644 --- a/examples/Responses/Example01_SimpleResponseAsync.cs +++ b/examples/Responses/Example01_SimpleResponseAsync.cs @@ -16,9 +16,9 @@ public async Task Example01_SimpleResponseAsync() { OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - OpenAIResponse response = await client.CreateResponseAsync("Say 'this is a test.'"); + ResponseResult response = await client.CreateResponseAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); } } diff --git a/examples/Responses/Example02_SimpleResponseStreaming.cs b/examples/Responses/Example02_SimpleResponseStreaming.cs index 69269a2c6..3c50039aa 100644 --- a/examples/Responses/Example02_SimpleResponseStreaming.cs +++ b/examples/Responses/Example02_SimpleResponseStreaming.cs @@ -16,7 +16,7 @@ public void Example02_SimpleResponseStreaming() { OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - CollectionResult responseUpdates = client.CreateResponseStreaming("Say 'this is a test.'"); + CollectionResult responseUpdates = 
client.CreateResponseStreaming(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); Console.Write($"[ASSISTANT]: "); foreach (StreamingResponseUpdate update in responseUpdates) diff --git a/examples/Responses/Example02_SimpleResponseStreamingAsync.cs b/examples/Responses/Example02_SimpleResponseStreamingAsync.cs index 596cfbd54..6cef0f978 100644 --- a/examples/Responses/Example02_SimpleResponseStreamingAsync.cs +++ b/examples/Responses/Example02_SimpleResponseStreamingAsync.cs @@ -18,7 +18,7 @@ public async Task Example02_SimpleResponseStreamingAsync() { OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync("Say 'this is a test.'"); + AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); Console.Write($"[ASSISTANT]: "); await foreach (StreamingResponseUpdate update in responseUpdates) diff --git a/examples/Responses/Example03_FunctionCalling.cs b/examples/Responses/Example03_FunctionCalling.cs index 3dac6bd96..872625b49 100644 --- a/examples/Responses/Example03_FunctionCalling.cs +++ b/examples/Responses/Example03_FunctionCalling.cs @@ -69,7 +69,7 @@ public void Example03_FunctionCalling() ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"), ]; - ResponseCreationOptions options = new() + CreateResponseOptions options = new(inputItems) { Tools = { getCurrentLocationTool, getCurrentWeatherTool }, }; @@ -81,11 +81,11 @@ public void Example03_FunctionCalling() do { requiresAction = false; - OpenAIResponse response = client.CreateResponse(inputItems, options); + ResponseResult response = client.CreateResponse(options); - inputItems.AddRange(response.OutputItems); + inputItems.AddRange(response.Output); - foreach (ResponseItem outputItem in response.OutputItems) + foreach (ResponseItem 
outputItem in response.Output) { if (outputItem is FunctionCallResponseItem functionCall) { @@ -132,7 +132,7 @@ public void Example03_FunctionCalling() } } - PrintMessageItems(response.OutputItems.OfType()); + PrintMessageItems(response.Output.OfType()); } while (requiresAction); } diff --git a/examples/Responses/Example03_FunctionCallingAsync.cs b/examples/Responses/Example03_FunctionCallingAsync.cs index 2f92baea2..19101ff66 100644 --- a/examples/Responses/Example03_FunctionCallingAsync.cs +++ b/examples/Responses/Example03_FunctionCallingAsync.cs @@ -26,7 +26,7 @@ public async Task Example03_FunctionCallingAsync() ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"), ]; - ResponseCreationOptions options = new() + CreateResponseOptions options = new(inputItems) { Tools = { getCurrentLocationTool, getCurrentWeatherTool }, }; @@ -38,11 +38,11 @@ public async Task Example03_FunctionCallingAsync() do { requiresAction = false; - OpenAIResponse response = await client.CreateResponseAsync(inputItems, options); + ResponseResult response = await client.CreateResponseAsync(options); - inputItems.AddRange(response.OutputItems); + inputItems.AddRange(response.Output); - foreach (ResponseItem outputItem in response.OutputItems) + foreach (ResponseItem outputItem in response.Output) { if (outputItem is FunctionCallResponseItem functionCall) { @@ -89,7 +89,7 @@ public async Task Example03_FunctionCallingAsync() } } - PrintMessageItems(response.OutputItems.OfType()); + PrintMessageItems(response.Output.OfType()); } while (requiresAction); } diff --git a/examples/Responses/Example04_FunctionCallingStreaming.cs b/examples/Responses/Example04_FunctionCallingStreaming.cs index 6c16c79ff..72a109e8e 100644 --- a/examples/Responses/Example04_FunctionCallingStreaming.cs +++ b/examples/Responses/Example04_FunctionCallingStreaming.cs @@ -26,7 +26,7 @@ public void Example04_FunctionCallingStreaming() ResponseItem.CreateUserMessageItem("What's the 
weather like today for my current location?"), ]; - ResponseCreationOptions options = new() + CreateResponseOptions options = new(inputItems) { Tools = { getCurrentLocationTool, getCurrentWeatherTool }, }; @@ -38,7 +38,7 @@ public void Example04_FunctionCallingStreaming() do { requiresAction = false; - CollectionResult responseUpdates = client.CreateResponseStreaming(inputItems, options); + CollectionResult responseUpdates = client.CreateResponseStreaming(options); foreach (StreamingResponseUpdate update in responseUpdates) { diff --git a/examples/Responses/Example04_FunctionCallingStreamingAsync.cs b/examples/Responses/Example04_FunctionCallingStreamingAsync.cs index 177522d31..a8f6885b5 100644 --- a/examples/Responses/Example04_FunctionCallingStreamingAsync.cs +++ b/examples/Responses/Example04_FunctionCallingStreamingAsync.cs @@ -27,7 +27,7 @@ public async Task Example04_FunctionCallingStreamingAsync() ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"), ]; - ResponseCreationOptions options = new() + CreateResponseOptions options = new(inputItems) { Tools = { getCurrentLocationTool, getCurrentWeatherTool }, }; @@ -39,7 +39,7 @@ public async Task Example04_FunctionCallingStreamingAsync() do { requiresAction = false; - AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(inputItems, options); + AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(options); await foreach (StreamingResponseUpdate update in responseUpdates) { diff --git a/examples/Responses/Example05_RemoteMcp.cs b/examples/Responses/Example05_RemoteMcp.cs index 282a58376..aae2f9d94 100644 --- a/examples/Responses/Example05_RemoteMcp.cs +++ b/examples/Responses/Example05_RemoteMcp.cs @@ -13,7 +13,9 @@ public partial class ResponseExamples [Test] public void Example05_RemoteMcp() { - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Roll 2d4+1") + 
]) { Tools = { new McpTool(serverLabel: "dmcp", serverUri: new Uri("https://dmcp-server.deno.dev/sse")) @@ -26,9 +28,9 @@ public void Example05_RemoteMcp() OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - OpenAIResponse response = client.CreateResponse("Roll 2d4+1", options); + ResponseResult response = client.CreateResponse(options); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); } } diff --git a/examples/Responses/Example05_RemoteMcpAsync.cs b/examples/Responses/Example05_RemoteMcpAsync.cs index c1066b8e7..860d2b325 100644 --- a/examples/Responses/Example05_RemoteMcpAsync.cs +++ b/examples/Responses/Example05_RemoteMcpAsync.cs @@ -14,7 +14,9 @@ public partial class ResponseExamples [Test] public async Task Example05_RemoteMcpAsync() { - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Roll 2d4+1") + ]) { Tools = { new McpTool(serverLabel: "dmcp", serverUri: new Uri("https://dmcp-server.deno.dev/sse")) @@ -27,9 +29,9 @@ public async Task Example05_RemoteMcpAsync() OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - OpenAIResponse response = await client.CreateResponseAsync("Roll 2d4+1", options); + ResponseResult response = await client.CreateResponseAsync(options); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); } } diff --git a/examples/Responses/Example06_RemoteMcpAuthentication.cs b/examples/Responses/Example06_RemoteMcpAuthentication.cs index 612d28c65..4dea86566 100644 --- a/examples/Responses/Example06_RemoteMcpAuthentication.cs +++ b/examples/Responses/Example06_RemoteMcpAuthentication.cs @@ -13,7 +13,8 @@ public partial class ResponseExamples [Test] public void Example06_RemoteMcpAuthentication() { - 
ResponseCreationOptions options = new() + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Create a payment link for $20")]) { Tools = { new McpTool(serverLabel: "stripe", serverUri: new Uri("https://mcp.stripe.com")) @@ -25,9 +26,9 @@ public void Example06_RemoteMcpAuthentication() OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - OpenAIResponse response = client.CreateResponse("Create a payment link for $20", options); + ResponseResult response = client.CreateResponse(options); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); } } diff --git a/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs b/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs index c70af1682..f6f0c03db 100644 --- a/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs +++ b/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs @@ -14,7 +14,9 @@ public partial class ResponseExamples [Test] public async Task Example06_RemoteMcpAuthenticationAsync() { - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Create a payment link for $20") + ]) { Tools = { new McpTool(serverLabel: "stripe", serverUri: new Uri("https://mcp.stripe.com")) @@ -26,9 +28,9 @@ public async Task Example06_RemoteMcpAuthenticationAsync() OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - OpenAIResponse response = await client.CreateResponseAsync("Create a payment link for $20", options); + ResponseResult response = await client.CreateResponseAsync(options); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); } } diff --git a/examples/Responses/Example07_InputAdditionalProperties.cs 
b/examples/Responses/Example07_InputAdditionalProperties.cs index 6429cc6de..e7ed5986e 100644 --- a/examples/Responses/Example07_InputAdditionalProperties.cs +++ b/examples/Responses/Example07_InputAdditionalProperties.cs @@ -17,15 +17,17 @@ public void Example07_InputAdditionalProperties() OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. - // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on ResponseCreationOptions in the request payload. + // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on CreateResponseOptions in the request payload. // See the API docs https://platform.openai.com/docs/api-reference/responses/create for supported additional fields. - ResponseCreationOptions options = new(); + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("What is the answer to the ultimate question of life, the universe, and everything?") + ]); options.Patch.Set("$.reasoning.effort"u8, "high"); options.Patch.Set("$.text.verbosity"u8, "medium"); - OpenAIResponse response = client.CreateResponse("What is the answer to the ultimate question of life, the universe, and everything?", options); + ResponseResult response = client.CreateResponse(options); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); // Read extra fields from the response via Patch. // The service returns fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on OpenAIResponse. 
diff --git a/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs b/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs index 4e1ce72dd..35836b233 100644 --- a/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs +++ b/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs @@ -18,15 +18,17 @@ public async Task Example07_InputAdditionalPropertiesAsync() OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. - // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on ResponseCreationOptions in the request payload. + // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on CreateResponseOptions in the request payload. // See the API docs https://platform.openai.com/docs/api-reference/responses/create for supported additional fields. - ResponseCreationOptions options = new(); + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("What is the answer to the ultimate question of life, the universe, and everything?") + ]); options.Patch.Set("$.reasoning.effort"u8, "high"); options.Patch.Set("$.text.verbosity"u8, "medium"); - OpenAIResponse response = await client.CreateResponseAsync("What is the answer to the ultimate question of life, the universe, and everything?", options); + ResponseResult response = await client.CreateResponseAsync(options); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); // Read extra fields from the response via Patch. // The service returns fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on OpenAIResponse. 
diff --git a/examples/Responses/Example08_OutputAdditionalProperties.cs b/examples/Responses/Example08_OutputAdditionalProperties.cs index 09aaf8f30..8e0e3427e 100644 --- a/examples/Responses/Example08_OutputAdditionalProperties.cs +++ b/examples/Responses/Example08_OutputAdditionalProperties.cs @@ -17,7 +17,9 @@ public void Example08_OutputAdditionalProperties() { OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf") + ]) { Tools = { @@ -28,8 +30,8 @@ public void Example08_OutputAdditionalProperties() } }; - OpenAIResponse response = client.CreateResponse("Generate an image of gray tabby cat hugging an otter with an orange scarf", options); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[1]; + ResponseResult response = client.CreateResponse(options); + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[1]; BinaryData bytes = imageGenResponse.ImageResultBytes; using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.png"); diff --git a/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs b/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs index 3402fe54a..97f7ab9aa 100644 --- a/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs +++ b/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs @@ -18,7 +18,9 @@ public async Task Example08_OutputAdditionalPropertiesAsync() { OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an 
orange scarf") + ]) { Tools = { @@ -29,8 +31,8 @@ public async Task Example08_OutputAdditionalPropertiesAsync() } }; - OpenAIResponse response = await client.CreateResponseAsync("Generate an image of gray tabby cat hugging an otter with an orange scarf", options); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[1]; + ResponseResult response = await client.CreateResponseAsync(options); + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[1]; BinaryData bytes = imageGenResponse.ImageResultBytes; using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.png"); diff --git a/examples/Responses/Example09_ModelOverridePerRequest.cs b/examples/Responses/Example09_ModelOverridePerRequest.cs index f948d35b6..ff96692b6 100644 --- a/examples/Responses/Example09_ModelOverridePerRequest.cs +++ b/examples/Responses/Example09_ModelOverridePerRequest.cs @@ -17,15 +17,17 @@ public void Example09_ModelOverridePerRequest() OpenAIResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. - // Patch lets you set fields like `model` that aren't exposed on ResponseCreationOptions. + // Patch lets you set fields like `model` that aren't exposed on CreateResponseOptions. // This overrides the model set on the client just for the request where this options instance is used. // See the API docs https://platform.openai.com/docs/api-reference/responses/create for supported additional fields. 
- ResponseCreationOptions options = new(); + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Say 'this is a test.") + ]); options.Patch.Set("$.model"u8, "gpt-5"); - OpenAIResponse response = client.CreateResponse("Say 'this is a test.", options); + ResponseResult response = client.CreateResponse(options); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}, [Mode]: {response.Model}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}, [Mode]: {response.Model}"); } } diff --git a/examples/Responses/Example09_ModelOverridePerRequestAsync.cs b/examples/Responses/Example09_ModelOverridePerRequestAsync.cs index 3796b675f..05b7c892c 100644 --- a/examples/Responses/Example09_ModelOverridePerRequestAsync.cs +++ b/examples/Responses/Example09_ModelOverridePerRequestAsync.cs @@ -18,15 +18,17 @@ public async Task Example09_ModelOverridePerRequestAsync() OpenAIResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. - // Patch lets you set fields like `model` that aren't exposed on ResponseCreationOptions. + // Patch lets you set fields like `model` that aren't exposed on CreateResponseOptions. // This overrides the model set on the client just for the request where this options instance is used. // See the API docs https://platform.openai.com/docs/api-reference/responses/create for supported additional fields. 
- ResponseCreationOptions options = new(); + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Say 'this is a test.") + ]); options.Patch.Set("$.model"u8, "gpt-5"); - OpenAIResponse response = await client.CreateResponseAsync("Say 'this is a test.", options); + ResponseResult response = await client.CreateResponseAsync(options); - Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}, [Mode]: {response.Model}"); + Console.WriteLine($"[ASSISTANT]: {response.OutputText}, [Mode]: {response.Model}"); } } diff --git a/examples/Responses/Example10_CodeInterpreter.cs b/examples/Responses/Example10_CodeInterpreter.cs index b457cdfcd..489e5cf09 100644 --- a/examples/Responses/Example10_CodeInterpreter.cs +++ b/examples/Responses/Example10_CodeInterpreter.cs @@ -21,19 +21,20 @@ public void Example10_CodeInterpreter() CodeInterpreterToolContainer container = new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration()); CodeInterpreterTool codeInterpreterTool = new(container); - ResponseCreationOptions options = new() - { - Tools = { codeInterpreterTool } - }; - List inputItems = [ ResponseItem.CreateUserMessageItem("Create an Excel spreadsheet that contains the mathematical times tables from 1-12 and make it available for download."), ]; + CreateResponseOptions options = new(inputItems) + { + Tools = { codeInterpreterTool } + }; + + - OpenAIResponse response = client.CreateResponse(inputItems, options); + ResponseResult response = client.CreateResponse(options); - MessageResponseItem message = response.OutputItems + MessageResponseItem message = response.Output .OfType() .FirstOrDefault(); diff --git a/examples/Responses/Example10_CodeInterpreterAsync.cs b/examples/Responses/Example10_CodeInterpreterAsync.cs index 6bf523830..8945a560e 100644 --- a/examples/Responses/Example10_CodeInterpreterAsync.cs +++ b/examples/Responses/Example10_CodeInterpreterAsync.cs @@ -22,19 +22,16 @@ public async Task 
Example10_CodeInterpreterAsync() CodeInterpreterToolContainer container = new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration()); CodeInterpreterTool codeInterpreterTool = new(container); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ + ResponseItem.CreateUserMessageItem("Create an Excel spreadsheet that contains the mathematical times tables from 1-12 and make it available for download."), + ]) { Tools = { codeInterpreterTool } }; - List inputItems = - [ - ResponseItem.CreateUserMessageItem("Create an Excel spreadsheet that contains the mathematical times tables from 1-12 and make it available for download."), - ]; - - OpenAIResponse response = await client.CreateResponseAsync(inputItems, options); + ResponseResult response = await client.CreateResponseAsync(options); - MessageResponseItem message = response.OutputItems + MessageResponseItem message = response.Output .OfType() .FirstOrDefault(); diff --git a/src/Custom/Responses/CreateResponseOptions.cs b/src/Custom/Responses/CreateResponseOptions.cs index b516b6a89..e5479427d 100644 --- a/src/Custom/Responses/CreateResponseOptions.cs +++ b/src/Custom/Responses/CreateResponseOptions.cs @@ -96,7 +96,7 @@ internal CreateResponseOptions(IDictionary metadata, float? temp public bool? 
Stream { get; set; } - public static CreateResponseOptions Create(IEnumerable inputItems, OpenAIResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false) + internal static CreateResponseOptions Create(IEnumerable inputItems, OpenAIResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false) { Argument.AssertNotNull(inputItems, nameof(inputItems)); options ??= new(); diff --git a/src/Custom/Responses/Internal/GeneratorStubs.cs b/src/Custom/Responses/Internal/GeneratorStubs.cs index d92b2c7fd..23a046343 100644 --- a/src/Custom/Responses/Internal/GeneratorStubs.cs +++ b/src/Custom/Responses/Internal/GeneratorStubs.cs @@ -42,7 +42,7 @@ namespace OpenAI.Responses; [CodeGenType("ItemReferenceItemParam")] internal partial class InternalItemReferenceItemParam {} [CodeGenType("ItemType")] internal readonly partial struct InternalItemType {} [CodeGenType("LocationType")] internal readonly partial struct InternalWebSearchUserLocationKind {} -[CodeGenType("ModelIdsResponses")] internal readonly partial struct InternalModelIdsResponses {} +// [CodeGenType("ModelIdsResponses")] internal readonly partial struct InternalModelIdsResponses {} [CodeGenType("RankingOptionsRanker1")] internal readonly partial struct InternalRankingOptionsRanker1 {} [CodeGenType("ReasoningGenerateSummary")] internal readonly partial struct InternalReasoningGenerateSummary {} [CodeGenType("ReasoningItemParam")] internal partial class InternalReasoningItemParam {} diff --git a/src/Custom/Responses/OpenAIResponse.cs b/src/Custom/Responses/OpenAIResponse.cs index 7e976c39f..1e54b8adc 100644 --- a/src/Custom/Responses/OpenAIResponse.cs +++ b/src/Custom/Responses/OpenAIResponse.cs @@ -1,9 +1,5 @@ -using System.ClientModel; -using System.ClientModel.Primitives; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; using System.Linq; -using System.Text.Json; namespace OpenAI.Responses; @@ -12,7 +8,7 @@ namespace OpenAI.Responses; // 
- Renamed. [CodeGenType("Response")] [CodeGenSuppress("OutputText")] -public partial class OpenAIResponse +internal partial class OpenAIResponse { // CUSTOM: Renamed. [CodeGenMember("Background")] diff --git a/src/Custom/Responses/OpenAIResponseClient.cs b/src/Custom/Responses/OpenAIResponseClient.cs index 8834b879e..aa4568f2c 100644 --- a/src/Custom/Responses/OpenAIResponseClient.cs +++ b/src/Custom/Responses/OpenAIResponseClient.cs @@ -125,7 +125,7 @@ protected internal OpenAIResponseClient(ClientPipeline pipeline, string model, O [Experimental("OPENAI001")] public string Model => _model; - public virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { return CreateResponseAsync(inputItems, options, cancellationToken.ToRequestOptions() ?? new RequestOptions()); } @@ -145,7 +145,7 @@ internal async Task> CreateResponseAsync(IEnumerabl return ClientResult.FromValue(convenienceValue, protocolResult.GetRawResponse()); } - public virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(inputItems, nameof(inputItems)); @@ -155,7 +155,7 @@ public virtual ClientResult CreateResponse(IEnumerable> CreateResponseAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual async Task> CreateResponseAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); @@ 
-166,7 +166,7 @@ public virtual async Task> CreateResponseAsync(stri .ConfigureAwait(false); } - public virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); @@ -192,7 +192,7 @@ public virtual async Task> CreateResponseAsync(Crea return ClientResult.FromValue((ResponseResult)result.GetRawResponse().Content, result.GetRawResponse()); } - public virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { return CreateResponseStreamingAsync(inputItems, options, cancellationToken.ToRequestOptions(streaming: true)); } @@ -233,7 +233,7 @@ internal AsyncCollectionResult CreateResponseStreamingA requestOptions.CancellationToken); } - public virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(inputItems, nameof(inputItems)); @@ -254,7 +254,7 @@ public virtual CollectionResult CreateResponseStreaming cancellationToken); } - public virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual AsyncCollectionResult CreateResponseStreamingAsync(string 
userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); @@ -264,7 +264,7 @@ public virtual AsyncCollectionResult CreateResponseStre cancellationToken); } - public virtual CollectionResult CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual CollectionResult CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); @@ -274,7 +274,7 @@ public virtual CollectionResult CreateResponseStreaming cancellationToken); } - public virtual Task> GetResponseAsync(string responseId, CancellationToken cancellationToken = default) + internal virtual Task> GetResponseAsync(string responseId, CancellationToken cancellationToken = default) { return GetResponseAsync(responseId, cancellationToken.ToRequestOptions() ?? 
new RequestOptions()); } @@ -293,7 +293,7 @@ internal async Task> GetResponseAsync(string respon return ClientResult.FromValue(convenienceResult, protocolResult.GetRawResponse()); } - public virtual ClientResult GetResponse(string responseId, CancellationToken cancellationToken = default) + internal virtual ClientResult GetResponse(string responseId, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); @@ -302,7 +302,7 @@ public virtual ClientResult GetResponse(string responseId, Cance return ClientResult.FromValue(convenienceResult, protocolResult.GetRawResponse()); } - public async Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default) + public virtual async Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default) { Argument.AssertNotNull(options, nameof(options)); Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); @@ -320,7 +320,7 @@ public virtual ClientResult GetResponse(GetResponseOptions optio return ClientResult.FromValue((ResponseResult)protocolResult, protocolResult.GetRawResponse()); } - public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default) + internal virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, int? 
startingAfter = null, CancellationToken cancellationToken = default) { return GetResponseStreamingAsync(responseId, cancellationToken.ToRequestOptions(streaming: true), startingAfter); } @@ -351,7 +351,7 @@ public virtual CollectionResult GetResponseStreaming(Ge cancellationToken); } - public AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default) + public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default) { Argument.AssertNotNull(options, nameof(options)); Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); @@ -367,7 +367,7 @@ public AsyncCollectionResult GetResponseStreamingAsync( cancellationToken); } - public virtual CollectionResult GetResponseStreaming(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default) + internal virtual CollectionResult GetResponseStreaming(string responseId, int? 
startingAfter = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(responseId, nameof(responseId)); @@ -393,21 +393,21 @@ public virtual ClientResult DeleteResponse(string respon return ClientResult.FromValue((ResponseDeletionResult)result, result.GetRawResponse()); } - public virtual async Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default) + public virtual async Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); ClientResult protocolResult = await CancelResponseAsync(responseId, cancellationToken.ToRequestOptions()).ConfigureAwait(false); - OpenAIResponse convenienceResult = (OpenAIResponse)protocolResult; + ResponseResult convenienceResult = (ResponseResult)protocolResult; return ClientResult.FromValue(convenienceResult, protocolResult.GetRawResponse()); } - public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default) + public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); ClientResult protocolResult = CancelResponse(responseId, cancellationToken.ToRequestOptions()); - OpenAIResponse convenienceResult = (OpenAIResponse)protocolResult; + ResponseResult convenienceResult = (ResponseResult)protocolResult; return ClientResult.FromValue(convenienceResult, protocolResult.GetRawResponse()); } @@ -421,7 +421,7 @@ public virtual ClientResult GetResponseInputItems(GetResponseI return ClientResult.FromValue((ResponseItemList)result, result.GetRawResponse()); } - public virtual async Task GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default) + public virtual async Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken 
cancellationToken = default) { Argument.AssertNotNull(options, nameof(options)); Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); diff --git a/src/Custom/Responses/OpenAIResponsesModelFactory.cs b/src/Custom/Responses/OpenAIResponsesModelFactory.cs index fa96d3d3c..d8b8ae359 100644 --- a/src/Custom/Responses/OpenAIResponsesModelFactory.cs +++ b/src/Custom/Responses/OpenAIResponsesModelFactory.cs @@ -11,7 +11,7 @@ public static partial class OpenAIResponsesModelFactory { /// Initializes a new instance of . /// A new instance for mocking. - public static OpenAIResponse OpenAIResponse( + internal static OpenAIResponse OpenAIResponse( string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, diff --git a/src/Custom/Responses/ResponseCreationOptions.cs b/src/Custom/Responses/ResponseCreationOptions.cs index 82005e8b8..250810292 100644 --- a/src/Custom/Responses/ResponseCreationOptions.cs +++ b/src/Custom/Responses/ResponseCreationOptions.cs @@ -1,7 +1,5 @@ -using System; using System.ClientModel; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; namespace OpenAI.Responses; @@ -12,7 +10,7 @@ namespace OpenAI.Responses; [CodeGenType("CreateResponse")] [CodeGenVisibility(nameof(ResponseCreationOptions), CodeGenVisibility.Public)] [CodeGenSuppress(nameof(ResponseCreationOptions), typeof(IEnumerable))] -public partial class ResponseCreationOptions +internal partial class ResponseCreationOptions { // CUSTOM: Temporarily made internal. 
[CodeGenMember("Include")] diff --git a/src/Custom/Responses/ResponseResult.Serialization.cs b/src/Custom/Responses/ResponseResult.Serialization.cs index 760d050d2..c92ab45f4 100644 --- a/src/Custom/Responses/ResponseResult.Serialization.cs +++ b/src/Custom/Responses/ResponseResult.Serialization.cs @@ -107,10 +107,10 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit writer.WritePropertyName("previous_response_id"u8); writer.WriteStringValue(PreviousResponseId); } - if (Optional.IsDefined(Model) && !Patch.Contains("$.model"u8)) + if (Optional.IsDefined(InternalModel) && !Patch.Contains("$.model"u8)) { writer.WritePropertyName("model"u8); - writer.WriteStringValue(Model.Value.ToString()); + writer.WriteStringValue(InternalModel.Value.ToString()); } if (Optional.IsDefined(Reasoning) && !Patch.Contains("$.reasoning"u8)) { @@ -163,14 +163,7 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit if (Optional.IsDefined(ToolChoice) && !Patch.Contains("$.tool_choice"u8)) { writer.WritePropertyName("tool_choice"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(ToolChoice); -#else - using (JsonDocument document = JsonDocument.Parse(ToolChoice)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + writer.WriteObjectValue(ToolChoice, options); } if (Optional.IsDefined(Truncation) && !Patch.Contains("$.truncation"u8)) { @@ -290,7 +283,7 @@ internal static ResponseResult DeserializeResponseResult(JsonElement element, Bi string instructions = default; ResponseTextOptions text = default; IList tools = default; - BinaryData toolChoice = default; + ResponseToolChoice toolChoice = default; ResponseTruncationMode? 
truncation = default; string id = default; string @object = default; @@ -456,7 +449,7 @@ internal static ResponseResult DeserializeResponseResult(JsonElement element, Bi { continue; } - toolChoice = BinaryData.FromString(prop.Value.GetRawText()); + toolChoice = ResponseToolChoice.DeserializeResponseToolChoice(prop.Value, options); continue; } if (prop.NameEquals("truncation"u8)) diff --git a/src/Custom/Responses/ResponseResult.cs b/src/Custom/Responses/ResponseResult.cs index 85c694352..63b07545c 100644 --- a/src/Custom/Responses/ResponseResult.cs +++ b/src/Custom/Responses/ResponseResult.cs @@ -30,7 +30,7 @@ internal ResponseResult(IDictionary metadata, float? temperature } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - internal ResponseResult(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, InternalModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, BinaryData toolChoice, ResponseTruncationMode? truncation, string id, string @object, ResponseStatus? status, DateTimeOffset createdAt, ResponseError error, ResponseIncompleteStatusDetails incompleteDetails, IList output, string outputText, ResponseTokenUsage usage, bool parallelToolCalls, in JsonPatch patch) + internal ResponseResult(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, InternalModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncation, string id, string @object, ResponseStatus? 
status, DateTimeOffset createdAt, ResponseError error, ResponseIncompleteStatusDetails incompleteDetails, IList output, string outputText, ResponseTokenUsage usage, bool parallelToolCalls, in JsonPatch patch) { // Plugin customization: ensure initialization of collections Metadata = metadata ?? new ChangeTrackingDictionary(); @@ -39,7 +39,7 @@ internal ResponseResult(IDictionary metadata, float? temperature User = user; ServiceTier = serviceTier; PreviousResponseId = previousResponseId; - Model = model; + InternalModel = model; Reasoning = reasoning; Background = background; MaxOutputTokens = maxOutputTokens; @@ -79,7 +79,9 @@ internal ResponseResult(IDictionary metadata, float? temperature public string PreviousResponseId { get; } - internal InternalModelIdsResponses? Model { get; } + internal InternalModelIdsResponses? InternalModel { get; } + + public string Model => InternalModel?.ToString(); public ResponseReasoningOptions Reasoning { get; } @@ -93,7 +95,7 @@ internal ResponseResult(IDictionary metadata, float? temperature public IList Tools { get; } - public BinaryData ToolChoice { get; } + public ResponseToolChoice ToolChoice { get; } public ResponseTruncationMode? Truncation { get; } diff --git a/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.Serialization.cs b/src/Custom/Responses/StreamingResponseCompletedUpdate.Serialization.cs similarity index 97% rename from src/Generated/Models/Responses/StreamingResponseCompletedUpdate.Serialization.cs rename to src/Custom/Responses/StreamingResponseCompletedUpdate.Serialization.cs index e1c61e69f..5c128c06d 100644 --- a/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.Serialization.cs +++ b/src/Custom/Responses/StreamingResponseCompletedUpdate.Serialization.cs @@ -74,7 +74,7 @@ internal static StreamingResponseCompletedUpdate DeserializeStreamingResponseCom #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - OpenAIResponse response = default; + ResponseResult response = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) @@ -89,7 +89,7 @@ internal static StreamingResponseCompletedUpdate DeserializeStreamingResponseCom } if (prop.NameEquals("response"u8)) { - response = OpenAIResponse.DeserializeOpenAIResponse(prop.Value, prop.Value.GetUtf8Bytes(), options); + response = ResponseResult.DeserializeResponseResult(prop.Value, prop.Value.GetUtf8Bytes(), options); continue; } patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); diff --git a/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.cs b/src/Custom/Responses/StreamingResponseCompletedUpdate.cs similarity index 79% rename from src/Generated/Models/Responses/StreamingResponseCompletedUpdate.cs rename to src/Custom/Responses/StreamingResponseCompletedUpdate.cs index 5826fa001..0bb83e4e6 100644 --- a/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.cs +++ b/src/Custom/Responses/StreamingResponseCompletedUpdate.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System.ClientModel.Primitives; using System.Diagnostics.CodeAnalysis; @@ -10,18 +6,18 @@ namespace OpenAI.Responses [Experimental("OPENAI001")] public partial class StreamingResponseCompletedUpdate : StreamingResponseUpdate { - internal StreamingResponseCompletedUpdate(int sequenceNumber, OpenAIResponse response) : base(InternalResponseStreamEventType.ResponseCompleted, sequenceNumber) + internal StreamingResponseCompletedUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseCompleted, sequenceNumber) { Response = response; } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is 
subject to change or removal in future updates. - internal StreamingResponseCompletedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, OpenAIResponse response) : base(kind, sequenceNumber, patch) + internal StreamingResponseCompletedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, ResponseResult response) : base(kind, sequenceNumber, patch) { Response = response; } #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } } } diff --git a/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.Serialization.cs b/src/Custom/Responses/StreamingResponseCreatedUpdate.Serialization.cs similarity index 97% rename from src/Generated/Models/Responses/StreamingResponseCreatedUpdate.Serialization.cs rename to src/Custom/Responses/StreamingResponseCreatedUpdate.Serialization.cs index c4fd82354..f48a1dea8 100644 --- a/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.Serialization.cs +++ b/src/Custom/Responses/StreamingResponseCreatedUpdate.Serialization.cs @@ -74,7 +74,7 @@ internal static StreamingResponseCreatedUpdate DeserializeStreamingResponseCreat #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- OpenAIResponse response = default; + ResponseResult response = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) @@ -89,7 +89,7 @@ internal static StreamingResponseCreatedUpdate DeserializeStreamingResponseCreat } if (prop.NameEquals("response"u8)) { - response = OpenAIResponse.DeserializeOpenAIResponse(prop.Value, prop.Value.GetUtf8Bytes(), options); + response = ResponseResult.DeserializeResponseResult(prop.Value, prop.Value.GetUtf8Bytes(), options); continue; } patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); diff --git a/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.cs b/src/Custom/Responses/StreamingResponseCreatedUpdate.cs similarity index 77% rename from src/Generated/Models/Responses/StreamingResponseCreatedUpdate.cs rename to src/Custom/Responses/StreamingResponseCreatedUpdate.cs index 6ff299861..06e7a88c9 100644 --- a/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.cs +++ b/src/Custom/Responses/StreamingResponseCreatedUpdate.cs @@ -10,18 +10,18 @@ namespace OpenAI.Responses [Experimental("OPENAI001")] public partial class StreamingResponseCreatedUpdate : StreamingResponseUpdate { - internal StreamingResponseCreatedUpdate(int sequenceNumber, OpenAIResponse response) : base(InternalResponseStreamEventType.ResponseCreated, sequenceNumber) + internal StreamingResponseCreatedUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseCreated, sequenceNumber) { Response = response; } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- internal StreamingResponseCreatedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, OpenAIResponse response) : base(kind, sequenceNumber, patch) + internal StreamingResponseCreatedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, ResponseResult response) : base(kind, sequenceNumber, patch) { Response = response; } #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } } } diff --git a/src/Generated/Models/Responses/StreamingResponseFailedUpdate.Serialization.cs b/src/Custom/Responses/StreamingResponseFailedUpdate.Serialization.cs similarity index 97% rename from src/Generated/Models/Responses/StreamingResponseFailedUpdate.Serialization.cs rename to src/Custom/Responses/StreamingResponseFailedUpdate.Serialization.cs index 88ceb942e..fee04f968 100644 --- a/src/Generated/Models/Responses/StreamingResponseFailedUpdate.Serialization.cs +++ b/src/Custom/Responses/StreamingResponseFailedUpdate.Serialization.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System; using System.ClientModel.Primitives; using System.Text; @@ -74,7 +70,7 @@ internal static StreamingResponseFailedUpdate DeserializeStreamingResponseFailed #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- OpenAIResponse response = default; + ResponseResult response = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) @@ -89,7 +85,7 @@ internal static StreamingResponseFailedUpdate DeserializeStreamingResponseFailed } if (prop.NameEquals("response"u8)) { - response = OpenAIResponse.DeserializeOpenAIResponse(prop.Value, prop.Value.GetUtf8Bytes(), options); + response = ResponseResult.DeserializeResponseResult(prop.Value, prop.Value.GetUtf8Bytes(), options); continue; } patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); diff --git a/src/Generated/Models/Responses/StreamingResponseFailedUpdate.cs b/src/Custom/Responses/StreamingResponseFailedUpdate.cs similarity index 73% rename from src/Generated/Models/Responses/StreamingResponseFailedUpdate.cs rename to src/Custom/Responses/StreamingResponseFailedUpdate.cs index f453bb291..55ad666b5 100644 --- a/src/Generated/Models/Responses/StreamingResponseFailedUpdate.cs +++ b/src/Custom/Responses/StreamingResponseFailedUpdate.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System.ClientModel.Primitives; using System.Diagnostics.CodeAnalysis; @@ -10,18 +6,18 @@ namespace OpenAI.Responses [Experimental("OPENAI001")] public partial class StreamingResponseFailedUpdate : StreamingResponseUpdate { - internal StreamingResponseFailedUpdate(int sequenceNumber, OpenAIResponse response) : base(InternalResponseStreamEventType.ResponseFailed, sequenceNumber) + internal StreamingResponseFailedUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseFailed, sequenceNumber) { Response = response; } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- internal StreamingResponseFailedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, OpenAIResponse response) : base(kind, sequenceNumber, patch) + internal StreamingResponseFailedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, ResponseResult response) : base(kind, sequenceNumber, patch) { Response = response; } #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } } } diff --git a/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.Serialization.cs b/src/Custom/Responses/StreamingResponseInProgressUpdate.Serialization.cs similarity index 97% rename from src/Generated/Models/Responses/StreamingResponseInProgressUpdate.Serialization.cs rename to src/Custom/Responses/StreamingResponseInProgressUpdate.Serialization.cs index 88f502e9a..9dbaec211 100644 --- a/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.Serialization.cs +++ b/src/Custom/Responses/StreamingResponseInProgressUpdate.Serialization.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System; using System.ClientModel.Primitives; using System.Text; @@ -74,7 +70,7 @@ internal static StreamingResponseInProgressUpdate DeserializeStreamingResponseIn #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- OpenAIResponse response = default; + ResponseResult response = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) @@ -89,7 +85,7 @@ internal static StreamingResponseInProgressUpdate DeserializeStreamingResponseIn } if (prop.NameEquals("response"u8)) { - response = OpenAIResponse.DeserializeOpenAIResponse(prop.Value, prop.Value.GetUtf8Bytes(), options); + response = ResponseResult.DeserializeResponseResult(prop.Value, prop.Value.GetUtf8Bytes(), options); continue; } patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); diff --git a/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.cs b/src/Custom/Responses/StreamingResponseInProgressUpdate.cs similarity index 79% rename from src/Generated/Models/Responses/StreamingResponseInProgressUpdate.cs rename to src/Custom/Responses/StreamingResponseInProgressUpdate.cs index 6f6e03872..77413b5af 100644 --- a/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.cs +++ b/src/Custom/Responses/StreamingResponseInProgressUpdate.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System.ClientModel.Primitives; using System.Diagnostics.CodeAnalysis; @@ -10,18 +6,18 @@ namespace OpenAI.Responses [Experimental("OPENAI001")] public partial class StreamingResponseInProgressUpdate : StreamingResponseUpdate { - internal StreamingResponseInProgressUpdate(int sequenceNumber, OpenAIResponse response) : base(InternalResponseStreamEventType.ResponseInProgress, sequenceNumber) + internal StreamingResponseInProgressUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseInProgress, sequenceNumber) { Response = response; } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- internal StreamingResponseInProgressUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, OpenAIResponse response) : base(kind, sequenceNumber, patch) + internal StreamingResponseInProgressUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, ResponseResult response) : base(kind, sequenceNumber, patch) { Response = response; } #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } } } diff --git a/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.Serialization.cs b/src/Custom/Responses/StreamingResponseIncompleteUpdate.Serialization.cs similarity index 97% rename from src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.Serialization.cs rename to src/Custom/Responses/StreamingResponseIncompleteUpdate.Serialization.cs index 21805e20b..903c4e0fe 100644 --- a/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.Serialization.cs +++ b/src/Custom/Responses/StreamingResponseIncompleteUpdate.Serialization.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System; using System.ClientModel.Primitives; using System.Text; @@ -74,7 +70,7 @@ internal static StreamingResponseIncompleteUpdate DeserializeStreamingResponseIn #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- OpenAIResponse response = default; + ResponseResult response = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) @@ -89,7 +85,7 @@ internal static StreamingResponseIncompleteUpdate DeserializeStreamingResponseIn } if (prop.NameEquals("response"u8)) { - response = OpenAIResponse.DeserializeOpenAIResponse(prop.Value, prop.Value.GetUtf8Bytes(), options); + response = ResponseResult.DeserializeResponseResult(prop.Value, prop.Value.GetUtf8Bytes(), options); continue; } patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); diff --git a/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.cs b/src/Custom/Responses/StreamingResponseIncompleteUpdate.cs similarity index 79% rename from src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.cs rename to src/Custom/Responses/StreamingResponseIncompleteUpdate.cs index f68155072..8fc48ec49 100644 --- a/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.cs +++ b/src/Custom/Responses/StreamingResponseIncompleteUpdate.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System.ClientModel.Primitives; using System.Diagnostics.CodeAnalysis; @@ -10,18 +6,18 @@ namespace OpenAI.Responses [Experimental("OPENAI001")] public partial class StreamingResponseIncompleteUpdate : StreamingResponseUpdate { - internal StreamingResponseIncompleteUpdate(int sequenceNumber, OpenAIResponse response) : base(InternalResponseStreamEventType.ResponseIncomplete, sequenceNumber) + internal StreamingResponseIncompleteUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseIncomplete, sequenceNumber) { Response = response; } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- internal StreamingResponseIncompleteUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, OpenAIResponse response) : base(kind, sequenceNumber, patch) + internal StreamingResponseIncompleteUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, ResponseResult response) : base(kind, sequenceNumber, patch) { Response = response; } #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } } } diff --git a/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.Serialization.cs b/src/Custom/Responses/StreamingResponseQueuedUpdate.Serialization.cs similarity index 97% rename from src/Generated/Models/Responses/StreamingResponseQueuedUpdate.Serialization.cs rename to src/Custom/Responses/StreamingResponseQueuedUpdate.Serialization.cs index aaab4d10d..81f5a0a5c 100644 --- a/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.Serialization.cs +++ b/src/Custom/Responses/StreamingResponseQueuedUpdate.Serialization.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System; using System.ClientModel.Primitives; using System.Text; @@ -74,7 +70,7 @@ internal static StreamingResponseQueuedUpdate DeserializeStreamingResponseQueued #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- OpenAIResponse response = default; + ResponseResult response = default; foreach (var prop in element.EnumerateObject()) { if (prop.NameEquals("type"u8)) @@ -89,7 +85,7 @@ internal static StreamingResponseQueuedUpdate DeserializeStreamingResponseQueued } if (prop.NameEquals("response"u8)) { - response = OpenAIResponse.DeserializeOpenAIResponse(prop.Value, prop.Value.GetUtf8Bytes(), options); + response = ResponseResult.DeserializeResponseResult(prop.Value, prop.Value.GetUtf8Bytes(), options); continue; } patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); diff --git a/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.cs b/src/Custom/Responses/StreamingResponseQueuedUpdate.cs similarity index 73% rename from src/Generated/Models/Responses/StreamingResponseQueuedUpdate.cs rename to src/Custom/Responses/StreamingResponseQueuedUpdate.cs index 933b04b23..651dc4b6c 100644 --- a/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.cs +++ b/src/Custom/Responses/StreamingResponseQueuedUpdate.cs @@ -1,7 +1,3 @@ -// - -#nullable disable - using System.ClientModel.Primitives; using System.Diagnostics.CodeAnalysis; @@ -10,18 +6,18 @@ namespace OpenAI.Responses [Experimental("OPENAI001")] public partial class StreamingResponseQueuedUpdate : StreamingResponseUpdate { - internal StreamingResponseQueuedUpdate(int sequenceNumber, OpenAIResponse response) : base(InternalResponseStreamEventType.ResponseQueued, sequenceNumber) + internal StreamingResponseQueuedUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseQueued, sequenceNumber) { Response = response; } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- internal StreamingResponseQueuedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, OpenAIResponse response) : base(kind, sequenceNumber, patch) + internal StreamingResponseQueuedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, in JsonPatch patch, ResponseResult response) : base(kind, sequenceNumber, patch) { Response = response; } #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } } } diff --git a/src/Generated/Models/Responses/OpenAIResponse.Serialization.cs b/src/Generated/Models/Responses/OpenAIResponse.Serialization.cs index c752390de..a6649ff6d 100644 --- a/src/Generated/Models/Responses/OpenAIResponse.Serialization.cs +++ b/src/Generated/Models/Responses/OpenAIResponse.Serialization.cs @@ -12,7 +12,7 @@ namespace OpenAI.Responses { - public partial class OpenAIResponse : IJsonModel + internal partial class OpenAIResponse : IJsonModel { internal OpenAIResponse() : this(null, default, default, null, default, null, null, null, default, default, null, null, null, null, default, null, null, default, default, null, null, null, null, default, default) { diff --git a/src/Generated/Models/Responses/OpenAIResponse.cs b/src/Generated/Models/Responses/OpenAIResponse.cs index 8b2c62e01..4072bbc80 100644 --- a/src/Generated/Models/Responses/OpenAIResponse.cs +++ b/src/Generated/Models/Responses/OpenAIResponse.cs @@ -13,7 +13,7 @@ namespace OpenAI.Responses { [Experimental("OPENAI001")] - public partial class OpenAIResponse + internal partial class OpenAIResponse { [Experimental("SCME0001")] private JsonPatch _patch; diff --git a/src/Generated/Models/Responses/ResponseCreationOptions.Serialization.cs b/src/Generated/Models/Responses/ResponseCreationOptions.Serialization.cs index 7b2d7dd8d..301c412c1 100644 --- 
a/src/Generated/Models/Responses/ResponseCreationOptions.Serialization.cs +++ b/src/Generated/Models/Responses/ResponseCreationOptions.Serialization.cs @@ -11,7 +11,7 @@ namespace OpenAI.Responses { - public partial class ResponseCreationOptions : IJsonModel + internal partial class ResponseCreationOptions : IJsonModel { public ResponseCreationOptions() : this(null, default, default, null, default, null, null, null, default, default, null, null, null, null, default, null, null, default, default, default, default) { diff --git a/src/Generated/Models/Responses/ResponseCreationOptions.cs b/src/Generated/Models/Responses/ResponseCreationOptions.cs index efd29f51f..bd78e5add 100644 --- a/src/Generated/Models/Responses/ResponseCreationOptions.cs +++ b/src/Generated/Models/Responses/ResponseCreationOptions.cs @@ -11,7 +11,7 @@ namespace OpenAI.Responses { [Experimental("OPENAI001")] - public partial class ResponseCreationOptions + internal partial class ResponseCreationOptions { [Experimental("SCME0001")] private JsonPatch _patch; diff --git a/tests/Responses/OpenAIResponsesModelFactoryTests.cs b/tests/Responses/OpenAIResponsesModelFactoryTests.cs index b1951358a..e163110ad 100644 --- a/tests/Responses/OpenAIResponsesModelFactoryTests.cs +++ b/tests/Responses/OpenAIResponsesModelFactoryTests.cs @@ -11,31 +11,31 @@ namespace OpenAI.Tests.Responses; [Category("Smoke")] public partial class OpenAIResponsesModelFactoryTests { - [Test] - public void OpenAIResponseWorks() - { - string id = "response_123"; - DateTimeOffset createdAt = DateTimeOffset.UtcNow; - ResponseStatus status = ResponseStatus.Completed; - string model = "gpt-4o"; - IEnumerable outputItems = [ - OpenAIResponsesModelFactory.MessageResponseItem(id: "msg_1", role: MessageRole.User, status: MessageStatus.Completed), - OpenAIResponsesModelFactory.ReasoningResponseItem(id: "reason_1", encryptedContent: "encrypted", status: ReasoningStatus.InProgress, summaryText: "summary") - ]; - - OpenAIResponse response = 
OpenAIResponsesModelFactory.OpenAIResponse( - id: id, - createdAt: createdAt, - status: status, - model: model, - outputItems: outputItems); - - Assert.That(response.Id, Is.EqualTo(id)); - Assert.That(response.CreatedAt, Is.EqualTo(createdAt)); - Assert.That(response.Status, Is.EqualTo(status)); - Assert.That(response.Model, Is.EqualTo(model)); - Assert.That(response.OutputItems.SequenceEqual(outputItems), Is.True); - } + // [Test] + // public void OpenAIResponseWorks() + // { + // string id = "response_123"; + // DateTimeOffset createdAt = DateTimeOffset.UtcNow; + // ResponseStatus status = ResponseStatus.Completed; + // string model = "gpt-4o"; + // IEnumerable outputItems = [ + // OpenAIResponsesModelFactory.MessageResponseItem(id: "msg_1", role: MessageRole.User, status: MessageStatus.Completed), + // OpenAIResponsesModelFactory.ReasoningResponseItem(id: "reason_1", encryptedContent: "encrypted", status: ReasoningStatus.InProgress, summaryText: "summary") + // ]; + + // OpenAIResponse response = OpenAIResponsesModelFactory.OpenAIResponse( + // id: id, + // createdAt: createdAt, + // status: status, + // model: model, + // outputItems: outputItems); + + // Assert.That(response.Id, Is.EqualTo(id)); + // Assert.That(response.CreatedAt, Is.EqualTo(createdAt)); + // Assert.That(response.Status, Is.EqualTo(status)); + // Assert.That(response.Model, Is.EqualTo(model)); + // Assert.That(response.OutputItems.SequenceEqual(outputItems), Is.True); + // } [Test] public void MessageResponseItemWorks() diff --git a/tests/Responses/ResponseStoreTests.cs b/tests/Responses/ResponseStoreTests.cs index 14a16775b..d286e1d4f 100644 --- a/tests/Responses/ResponseStoreTests.cs +++ b/tests/Responses/ResponseStoreTests.cs @@ -33,7 +33,7 @@ public async Task GetInputItemsWithPagination() ResponseItem.CreateUserMessageItem("Item 4") }; - OpenAIResponse response = await client.CreateResponseAsync(inputItems); + ResponseResult response = await client.CreateResponseAsync(new(inputItems)); 
// Paginate through input items with a small page size var options = new ResponseItemCollectionOptions() @@ -76,7 +76,7 @@ public async Task GetInputItemsWithMultiPartPagination() ResponseItem.CreateUserMessageItem("Item 4") }; - OpenAIResponse response = await client.CreateResponseAsync(inputItems); + ResponseResult response = await client.CreateResponseAsync(new(inputItems)); // Paginate through input items with a small page size var options = new ResponseItemCollectionOptions() @@ -121,7 +121,7 @@ public async Task GetInputItemsWithAfterIdPagination() ResponseItem.CreateUserMessageItem("C") }; - OpenAIResponse response = await client.CreateResponseAsync(inputItems); + ResponseResult response = await client.CreateResponseAsync(new(inputItems)); string afterId = null; await foreach (ResponseItem first in client.GetResponseInputItemsAsync(response.Id)) @@ -161,7 +161,7 @@ public async Task GetInputItemsWithOrderFiltering() ResponseItem.CreateUserMessageItem("Second") }; - OpenAIResponse response = await client.CreateResponseAsync(inputItems); + ResponseResult response = await client.CreateResponseAsync(new(inputItems)); // Ascending var ascOptions = new ResponseItemCollectionOptions() @@ -203,12 +203,12 @@ public async Task GetInputItemsHandlesLargeLimits() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( + ResponseResult response = await client.CreateResponseAsync(new( [ ResponseItem.CreateUserMessageItem("alpha"), ResponseItem.CreateUserMessageItem("beta"), ResponseItem.CreateUserMessageItem("gamma"), - ]); + ])); var options = new ResponseItemCollectionOptions() { PageSizeLimit = 100 }; @@ -228,12 +228,12 @@ public async Task GetInputItemsWithMinimalLimits() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( + ResponseResult response = await client.CreateResponseAsync(new( [ ResponseItem.CreateUserMessageItem("x"), 
ResponseItem.CreateUserMessageItem("y"), ResponseItem.CreateUserMessageItem("z"), - ]); + ])); var options = new ResponseItemCollectionOptions() { PageSizeLimit = 1 }; @@ -253,12 +253,12 @@ public async Task GetInputItemsWithCancellationToken() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( + ResponseResult response = await client.CreateResponseAsync(new( [ ResponseItem.CreateUserMessageItem("ct1"), ResponseItem.CreateUserMessageItem("ct2"), ResponseItem.CreateUserMessageItem("ct3"), - ]); + ])); using var cts = new System.Threading.CancellationTokenSource(); @@ -289,12 +289,12 @@ public async Task GetInputItemsWithCombinedOptions() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( + ResponseResult response = await client.CreateResponseAsync(new( [ ResponseItem.CreateUserMessageItem("co1"), ResponseItem.CreateUserMessageItem("co2"), ResponseItem.CreateUserMessageItem("co3"), - ]); + ])); using var cts = new System.Threading.CancellationTokenSource(TimeSpan.FromSeconds(30)); diff --git a/tests/Responses/ResponsesTests.cs b/tests/Responses/ResponsesTests.cs index ed7644a68..141ea3711 100644 --- a/tests/Responses/ResponsesTests.cs +++ b/tests/Responses/ResponsesTests.cs @@ -77,23 +77,21 @@ public async Task ComputerToolWithScreenshotRoundTrip() { OpenAIResponseClient client = GetTestClient("computer-use-preview-2025-03-11"); ResponseTool computerTool = ResponseTool.CreateComputerTool(ComputerToolEnvironment.Windows, 1024, 768); - ResponseCreationOptions responseOptions = new() - { - Tools = { computerTool }, - TruncationMode = ResponseTruncationMode.Auto, - }; - OpenAIResponse response = await client.CreateResponseAsync( - inputItems: + CreateResponseOptions responseOptions = new( [ ResponseItem.CreateDeveloperMessageItem("Call tools when the user asks to perform computer-related tasks like clicking interface elements."), 
ResponseItem.CreateUserMessageItem("Click on the Save button.") - ], - responseOptions); + ]) + { + Tools = { computerTool }, + Truncation = ResponseTruncationMode.Auto, + }; + ResponseResult response = await client.CreateResponseAsync(responseOptions); while (true) { - Assert.That(response.OutputItems.Count, Is.GreaterThan(0)); - ResponseItem outputItem = response.OutputItems?.LastOrDefault(); + Assert.That(response.Output.Count, Is.GreaterThan(0)); + ResponseItem outputItem = response.Output?.LastOrDefault(); if (outputItem is ComputerCallResponseItem computerCall) { if (computerCall.Action.Kind == ComputerCallActionKind.Screenshot) @@ -105,7 +103,9 @@ public async Task ComputerToolWithScreenshotRoundTrip() ComputerCallOutput.CreateScreenshotOutput(screenshotBytes, "image/png")); responseOptions.PreviousResponseId = response.Id; - response = await client.CreateResponseAsync([screenshotReply], responseOptions); + responseOptions.Input.Clear(); + responseOptions.Input.Add(screenshotReply); + response = await client.CreateResponseAsync(responseOptions); } else if (computerCall.Action.Kind == ComputerCallActionKind.Click) { @@ -123,9 +123,10 @@ public async Task ComputerToolWithScreenshotRoundTrip() || assistantText.Contains("please confirm"))) { responseOptions.PreviousResponseId = response.Id; - response = await client.CreateResponseAsync( - "Yes, proceed.", - responseOptions); + responseOptions.Input.Clear(); + responseOptions.Input.Add( + ResponseItem.CreateAssistantMessageItem("Yes, proceed.")); + response = await client.CreateResponseAsync(responseOptions); } else { @@ -138,9 +139,8 @@ public async Task ComputerToolWithScreenshotRoundTrip() public async Task WebSearchCall() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( - "What was a positive news story from today?", - new ResponseCreationOptions() + ResponseResult response = await client.CreateResponseAsync( + new 
CreateResponseOptions([ResponseItem.CreateUserMessageItem("Searching the internet, what's the weather like in Seattle?")]) { Tools = { @@ -149,11 +149,11 @@ public async Task WebSearchCall() ToolChoice = ResponseToolChoice.CreateWebSearchChoice() }); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That(response.OutputItems[1], Is.InstanceOf()); + Assert.That(response.Output, Has.Count.EqualTo(2)); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That(response.Output[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; + MessageResponseItem message = (MessageResponseItem)response.Output[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -167,7 +167,7 @@ public async Task WebSearchCallStreaming() const string message = "Searching the internet, what's the weather like in San Francisco?"; - ResponseCreationOptions responseOptions = new() + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem(message)]) { Tools = { @@ -184,7 +184,7 @@ public async Task WebSearchCallStreaming() bool gotFinishedSearchItem = false; await foreach (StreamingResponseUpdate update - in client.CreateResponseStreamingAsync(message, responseOptions)) + in client.CreateResponseStreamingAsync(responseOptions)) { if (update is StreamingResponseWebSearchCallInProgressUpdate searchCallInProgressUpdate) { @@ -233,7 +233,7 @@ public async Task ResponseWithImageGenTool() { OpenAIResponseClient client = GetTestClient(); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { Tools = { @@ -248,21 +248,20 @@ public async Task 
ResponseWithImageGenTool() } }; - OpenAIResponse response = await client.CreateResponseAsync( - "Generate an image of gray tabby cat hugging an otter with an orange scarf", + ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That(response.OutputItems[1], Is.InstanceOf()); + Assert.That(response.Output, Has.Count.EqualTo(2)); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That(response.Output[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; + MessageResponseItem message = (MessageResponseItem)response.Output[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -274,7 +273,7 @@ public async Task ImageGenToolStreaming() const string message = "Draw a gorgeous image of a river made of white owl feathers, snaking its way through a serene winter landscape"; - ResponseCreationOptions responseOptions = new() + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem(message)]) { Tools = { @@ -296,7 +295,7 @@ public async Task ImageGenToolStreaming() bool gotCompletedResponseItem = false; await foreach (StreamingResponseUpdate update - in client.CreateResponseStreamingAsync(message, responseOptions)) + in client.CreateResponseStreamingAsync(responseOptions)) { if (update is 
StreamingResponseImageGenerationCallPartialImageUpdate imageGenCallInPartialUpdate) { @@ -357,7 +356,7 @@ public async Task ImageGenToolInputMaskWithImageBytes() string imageFilename = "images_dog_and_cat.png"; string imagePath = Path.Combine("Assets", imageFilename); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { Tools = { @@ -368,21 +367,20 @@ public async Task ImageGenToolInputMaskWithImageBytes() } }; - OpenAIResponse response = await client.CreateResponseAsync( - "Generate an image of gray tabby cat hugging an otter with an orange scarf", + ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That(response.OutputItems[1], Is.InstanceOf()); + Assert.That(response.Output, Has.Count.EqualTo(2)); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That(response.Output[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; + MessageResponseItem message = (MessageResponseItem)response.Output[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -392,7 +390,7 @@ public async Task ImageGenToolInputMaskWithImageUri() { OpenAIResponseClient client = GetTestClient(); - ResponseCreationOptions 
options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { Tools = { @@ -403,21 +401,20 @@ public async Task ImageGenToolInputMaskWithImageUri() } }; - OpenAIResponse response = await client.CreateResponseAsync( - "Generate an image of gray tabby cat hugging an otter with an orange scarf", + ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That(response.OutputItems[1], Is.InstanceOf()); + Assert.That(response.Output, Has.Count.EqualTo(2)); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That(response.Output[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; + MessageResponseItem message = (MessageResponseItem)response.Output[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -444,7 +441,7 @@ public async Task ImageGenToolInputMaskWithFileId() } Validate(file); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { Tools = { @@ -455,21 +452,20 @@ public async Task ImageGenToolInputMaskWithFileId() } }; - OpenAIResponse response = await client.CreateResponseAsync( - "Generate 
an image of gray tabby cat hugging an otter with an orange scarf", + ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That(response.OutputItems[1], Is.InstanceOf()); + Assert.That(response.Output, Has.Count.EqualTo(2)); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That(response.Output[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; + MessageResponseItem message = (MessageResponseItem)response.Output[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -482,7 +478,7 @@ public async Task StreamingResponses() List inputItems = [ResponseItem.CreateUserMessageItem("Hello, world!")]; List deltaTextSegments = []; string finalResponseText = null; - await foreach (StreamingResponseUpdate update in client.CreateResponseStreamingAsync(inputItems)) + await foreach (StreamingResponseUpdate update in client.CreateResponseStreamingAsync(new(inputItems))) { Console.WriteLine(ModelReaderWriter.Write(update)); if (update is StreamingResponseOutputTextDeltaUpdate outputTextDeltaUpdate) @@ -492,7 +488,7 @@ public async Task StreamingResponses() } else if (update is StreamingResponseCompletedUpdate responseCompletedUpdate) { - finalResponseText = responseCompletedUpdate.Response.GetOutputText(); + finalResponseText = 
responseCompletedUpdate.Response.OutputText; } } Assert.That(deltaTextSegments, Has.Count.GreaterThan(0)); @@ -507,7 +503,13 @@ public async Task ResponsesHelloWorldWithTool(string model) { OpenAIResponseClient client = GetTestClient(model); - ResponseCreationOptions options = new() + CreateResponseOptions options = new( + [ + ResponseItem.CreateUserMessageItem( + [ + ResponseContentPart.CreateInputTextPart("good morning, responses!"), + ]), + ]) { Tools = { @@ -527,16 +529,10 @@ public async Task ResponsesHelloWorldWithTool(string model) """), strictModeEnabled: false), }, - TruncationMode = ResponseTruncationMode.Auto, + Truncation = ResponseTruncationMode.Auto, }; - OpenAIResponse response = await client.CreateResponseAsync( - [ - ResponseItem.CreateUserMessageItem( - [ - ResponseContentPart.CreateInputTextPart("good morning, responses!"), - ]), - ], + ResponseResult response = await client.CreateResponseAsync( options); Assert.That(response.Id, Is.Not.Null.And.Not.Empty); @@ -546,7 +542,7 @@ public async Task ResponsesHelloWorldWithTool(string model) Assert.That(response.PreviousResponseId, Is.Null); // Observed: input may not exist on normal responses // Assert.That(response.Input.Count, Is.EqualTo(1)); - Assert.That(response.OutputItems.Count, Is.EqualTo(1)); + Assert.That(response.Output.Count, Is.EqualTo(1)); } [RecordedTest] @@ -554,9 +550,9 @@ public async Task ResponsesWithReasoning() { OpenAIResponseClient client = GetTestClient("o3-mini"); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What's the best way to fold a burrito?")]) { - ReasoningOptions = new() + Reasoning = new() { ReasoningSummaryVerbosity = ResponseReasoningSummaryVerbosity.Detailed, ReasoningEffortLevel = ResponseReasoningEffortLevel.Low, @@ -568,21 +564,21 @@ public async Task ResponsesWithReasoning() Instructions = "Perform reasoning over any questions asked by the user.", }; - OpenAIResponse response = await 
client.CreateResponseAsync([ResponseItem.CreateUserMessageItem("What's the best way to fold a burrito?")], options); + ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response, Is.Not.Null); Assert.That(response.Id, Is.Not.Null); Assert.That(response.CreatedAt, Is.GreaterThan(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero))); - Assert.That(response.TruncationMode, Is.EqualTo(ResponseTruncationMode.Disabled)); - Assert.That(response.MaxOutputTokenCount, Is.Null); + Assert.That(response.Truncation, Is.EqualTo(ResponseTruncationMode.Disabled)); + Assert.That(response.MaxOutputTokens, Is.Null); Assert.That(response.Model, Does.StartWith("o3-mini")); Assert.That(response.Usage, Is.Not.Null); Assert.That(response.Usage.OutputTokenDetails, Is.Not.Null); Assert.That(response.Usage.OutputTokenDetails.ReasoningTokenCount, Is.GreaterThan(0)); Assert.That(response.Metadata, Is.Not.Null.Or.Empty); Assert.That(response.Metadata["superfluous_key"], Is.EqualTo("superfluous_value")); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - ReasoningResponseItem reasoningItem = response.OutputItems[0] as ReasoningResponseItem; - MessageResponseItem messageItem = response.OutputItems[1] as MessageResponseItem; + Assert.That(response.Output, Has.Count.EqualTo(2)); + ReasoningResponseItem reasoningItem = response.Output[0] as ReasoningResponseItem; + MessageResponseItem messageItem = response.Output[1] as MessageResponseItem; Assert.That(reasoningItem.SummaryParts, Has.Count.GreaterThan(0)); Assert.That(reasoningItem.GetSummaryText(), Is.Not.Null.And.Not.Empty); Assert.That(reasoningItem.Id, Is.Not.Null.And.Not.Empty); @@ -602,10 +598,9 @@ ResponseContentPart contentPart await foreach (StreamingResponseUpdate update in client.CreateResponseStreamingAsync( - [inputItem], - new ResponseCreationOptions() + new ([inputItem]) { - TruncationMode = ResponseTruncationMode.Auto, + Truncation = ResponseTruncationMode.Auto, })) { 
Console.WriteLine(ModelReaderWriter.Write(update)); @@ -617,11 +612,11 @@ public async Task CanDeleteResponse() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync([ResponseItem.CreateUserMessageItem("Hello, model!")]); + ResponseResult response = await client.CreateResponseAsync(new([ResponseItem.CreateUserMessageItem("Hello, model!")])); async Task RetrieveThatResponseAsync() { - OpenAIResponse retrievedResponse = await client.GetResponseAsync(response.Id); + ResponseResult retrievedResponse = await client.GetResponseAsync(new(response.Id)); Assert.That(retrievedResponse.Id, Is.EqualTo(response.Id)); } @@ -638,14 +633,13 @@ public async Task CanOptOutOfStorage() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( - [ResponseItem.CreateUserMessageItem("Hello, model!")], - new ResponseCreationOptions() + ResponseResult response = await client.CreateResponseAsync( + new ([ResponseItem.CreateUserMessageItem("Hello, model!")]) { - StoredOutputEnabled = false, + Store = false, }); - ClientResultException expectedException = Assert.ThrowsAsync(async () => await client.GetResponseAsync(response.Id)); + ClientResultException expectedException = Assert.ThrowsAsync(async () => await client.GetResponseAsync(new(response.Id))); Assert.That(expectedException.Message, Does.Contain("not found")); } @@ -655,11 +649,11 @@ public async Task ResponseServiceTierWorks() OpenAIResponseClient client = GetTestClient(); MessageResponseItem message = ResponseItem.CreateUserMessageItem("Using a comprehensive evaluation of popular media in the 1970s and 1980s, what were the most common sci-fi themes?"); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([message]) { ServiceTier = ResponseServiceTier.Default, }; - OpenAIResponse response = await client.CreateResponseAsync([message], options); + ResponseResult response = await 
client.CreateResponseAsync(options); Assert.That(response, Is.Not.Null); Assert.That(response.ServiceTier, Is.EqualTo(ResponseServiceTier.Default)); @@ -669,17 +663,16 @@ public async Task ResponseServiceTierWorks() public async Task OutputTextMethod() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( - "Respond with only the word hello."); - Assert.That(response?.GetOutputText()?.Length, Is.GreaterThan(0).And.LessThan(7)); - Assert.That(response?.GetOutputText()?.ToLower(), Does.Contain("hello")); + ResponseResult response = await client.CreateResponseAsync( + new ([ResponseItem.CreateUserMessageItem("Respond with only the word hello.")])); + Assert.That(response?.OutputText?.Length, Is.GreaterThan(0).And.LessThan(7)); + Assert.That(response?.OutputText?.ToLower(), Does.Contain("hello")); - response.OutputItems.Add(ResponseItem.CreateAssistantMessageItem("More text!")); - Assert.That(response?.GetOutputText()?.ToLower(), Does.EndWith("more text!")); + response.Output.Add(ResponseItem.CreateAssistantMessageItem("More text!")); + Assert.That(response?.OutputText?.ToLower(), Does.EndWith("more text!")); response = await client.CreateResponseAsync( - "How's the weather?", - new ResponseCreationOptions() + new ([ResponseItem.CreateUserMessageItem("How's the weather?")]) { Tools = { @@ -691,7 +684,7 @@ public async Task OutputTextMethod() }, ToolChoice = ResponseToolChoice.CreateRequiredChoice(), }); - Assert.That(response.GetOutputText(), Is.Null); + Assert.That(response.OutputText, Is.Null); } [RecordedTest] @@ -699,13 +692,13 @@ public async Task MessageHistoryWorks() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( + ResponseResult response = await client.CreateResponseAsync(new( [ ResponseItem.CreateDeveloperMessageItem("You are a helpful assistant."), ResponseItem.CreateUserMessageItem("Hello, Assistant, my name is Bob!"), 
ResponseItem.CreateAssistantMessageItem("Hello, Bob. It's a nice, sunny day!"), ResponseItem.CreateUserMessageItem("What's my name and what did you tell me the weather was like?"), - ]); + ])); Assert.That(response, Is.Not.Null); } @@ -719,17 +712,17 @@ public async Task ImageInputWorks() string imagePath = Path.Join("Assets", "images_dog_and_cat.png"); BinaryData imageBytes = BinaryData.FromBytes(await File.ReadAllBytesAsync(imagePath)); - OpenAIResponse response = await client.CreateResponseAsync( + ResponseResult response = await client.CreateResponseAsync(new( [ ResponseItem.CreateUserMessageItem( [ ResponseContentPart.CreateInputTextPart("Please describe this picture for me"), ResponseContentPart.CreateInputImagePart(imageBytes, "image/png", ResponseImageDetailLevel.Low), ]), - ]); + ])); - Console.WriteLine(response.GetOutputText()); - Assert.That(response.GetOutputText().ToLowerInvariant(), Does.Contain("dog").Or.Contain("cat").IgnoreCase); + Console.WriteLine(response.OutputText); + Assert.That(response.OutputText.ToLowerInvariant(), Does.Contain("dog").Or.Contain("cat").IgnoreCase); } [RecordedTest] @@ -756,9 +749,9 @@ public async Task FileInputFromIdWorks() ResponseContentPart.CreateInputFilePart(newFileToUse.Id), ]); - OpenAIResponse response = await client.CreateResponseAsync([messageItem]); + ResponseResult response = await client.CreateResponseAsync(new([messageItem])); - Assert.That(response?.GetOutputText()?.ToLower(), Does.Contain("pizza")); + Assert.That(response?.OutputText?.ToLower(), Does.Contain("pizza")); } [RecordedTest] @@ -776,9 +769,9 @@ public async Task FileInputFromBinaryWorks() ResponseContentPart.CreateInputFilePart(fileBytes, "application/pdf", "test_favorite_foods.pdf"), ]); - OpenAIResponse response = await client.CreateResponseAsync([messageItem]); + ResponseResult response = await client.CreateResponseAsync(new([messageItem])); - Assert.That(response?.GetOutputText()?.ToLower(), Does.Contain("pizza")); + 
Assert.That(response?.OutputText?.ToLower(), Does.Contain("pizza")); } [RecordedTest] @@ -789,7 +782,7 @@ public async Task AllInstructionMethodsWork(ResponsesTestInstructionMethod instr { const string instructions = "Always begin your replies with 'Arr, matey'"; - List messages = new(); + List messages = new(); if (instructionMethod == ResponsesTestInstructionMethod.SystemMessage) { @@ -803,7 +796,7 @@ public async Task AllInstructionMethodsWork(ResponsesTestInstructionMethod instr const string userMessage = "Hello, model!"; messages.Add(ResponseItem.CreateUserMessageItem(userMessage)); - ResponseCreationOptions options = new(); + CreateResponseOptions options = new(messages); if (instructionMethod == ResponsesTestInstructionMethod.InstructionsProperty) { @@ -811,16 +804,16 @@ public async Task AllInstructionMethodsWork(ResponsesTestInstructionMethod instr } OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync(messages, options); + ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response, Is.Not.Null); - Assert.That(response.OutputItems, Is.Not.Null.And.Not.Empty); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That((response.OutputItems[0] as MessageResponseItem).Content, Is.Not.Null.And.Not.Empty); - Assert.That((response.OutputItems[0] as MessageResponseItem).Content[0].Text, Does.StartWith("Arr, matey")); + Assert.That(response.Output, Is.Not.Null.And.Not.Empty); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That((response.Output[0] as MessageResponseItem).Content, Is.Not.Null.And.Not.Empty); + Assert.That((response.Output[0] as MessageResponseItem).Content[0].Text, Does.StartWith("Arr, matey")); - OpenAIResponse retrievedResponse = await client.GetResponseAsync(response.Id); - Assert.That((retrievedResponse?.OutputItems?.FirstOrDefault() as MessageResponseItem)?.Content?.FirstOrDefault()?.Text, Does.StartWith("Arr, matey")); + 
ResponseResult retrievedResponse = await client.GetResponseAsync(new(response.Id)); + Assert.That((retrievedResponse?.Output?.FirstOrDefault() as MessageResponseItem)?.Content?.FirstOrDefault()?.Text, Does.StartWith("Arr, matey")); if (instructionMethod == ResponsesTestInstructionMethod.InstructionsProperty) { @@ -858,11 +851,10 @@ public async Task TwoTurnCrossModel() OpenAIResponseClient client2 = GetTestClient("o3-mini"); - OpenAIResponse response = await client.CreateResponseAsync( - [ResponseItem.CreateUserMessageItem("Hello, Assistant! My name is Travis.")]); - OpenAIResponse response2 = await client2.CreateResponseAsync( - [ResponseItem.CreateUserMessageItem("What's my name?")], - new ResponseCreationOptions() + ResponseResult response = await client.CreateResponseAsync(new( + [ResponseItem.CreateUserMessageItem("Hello, Assistant! My name is Travis.")])); + ResponseResult response2 = await client2.CreateResponseAsync( + new ([ResponseItem.CreateUserMessageItem("What's my name?")]) { PreviousResponseId = response.Id, }); @@ -875,11 +867,10 @@ public async Task StructuredOutputs(string modelName) { OpenAIResponseClient client = GetTestClient(modelName); - OpenAIResponse response = await client.CreateResponseAsync( - "Write a JSON document with a list of five animals", - new ResponseCreationOptions() + ResponseResult response = await client.CreateResponseAsync( + new ([ResponseItem.CreateUserMessageItem("Write a JSON document with a list of five animals")]) { - TextOptions = new ResponseTextOptions() + Text = new ResponseTextOptions() { TextFormat = ResponseTextFormat.CreateJsonSchemaFormat( "data_list", @@ -902,10 +893,10 @@ public async Task StructuredOutputs(string modelName) }); Assert.That( - response?.TextOptions?.TextFormat?.Kind, + response?.Text?.TextFormat?.Kind, Is.EqualTo(ResponseTextFormatKind.JsonSchema)); - Assert.That(response?.OutputItems, Has.Count.EqualTo(1)); - MessageResponseItem message = response.OutputItems[0] as MessageResponseItem; + 
Assert.That(response?.Output, Has.Count.EqualTo(1)); + MessageResponseItem message = response.Output[0] as MessageResponseItem; Assert.That(message?.Content, Has.Count.EqualTo(1)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -922,17 +913,16 @@ public async Task FunctionCallWorks() { OpenAIResponseClient client = GetTestClient(); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) { Tools = { s_GetWeatherAtLocationTool } }; - OpenAIResponse response = await client.CreateResponseAsync( - [ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")], + ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.OutputItems, Has.Count.EqualTo(1)); - FunctionCallResponseItem functionCall = response.OutputItems[0] as FunctionCallResponseItem; + Assert.That(response.Output, Has.Count.EqualTo(1)); + FunctionCallResponseItem functionCall = response.Output[0] as FunctionCallResponseItem; Assert.That(functionCall, Is.Not.Null); Assert.That(functionCall!.Id, Has.Length.GreaterThan(0)); Assert.That(functionCall.FunctionName, Is.EqualTo("get_weather_at_location")); @@ -944,18 +934,17 @@ public async Task FunctionCallWorks() _ = document.RootElement.GetProperty("location"); }); - ResponseCreationOptions turn2Options = new() + ResponseItem functionReply = ResponseItem.CreateFunctionCallOutputItem(functionCall.CallId, "22 celcius and windy"); + CreateResponseOptions turn2Options = new([functionReply]) { PreviousResponseId = response.Id, Tools = { s_GetWeatherAtLocationTool }, }; - ResponseItem functionReply = ResponseItem.CreateFunctionCallOutputItem(functionCall.CallId, "22 celcius and windy"); - OpenAIResponse turn2Response = await client.CreateResponseAsync( - [functionReply], + ResponseResult turn2Response = await client.CreateResponseAsync( turn2Options); - 
Assert.That(turn2Response.OutputItems?.Count, Is.EqualTo(1)); - MessageResponseItem turn2Message = turn2Response!.OutputItems[0] as MessageResponseItem; + Assert.That(turn2Response.Output?.Count, Is.EqualTo(1)); + MessageResponseItem turn2Message = turn2Response!.Output[0] as MessageResponseItem; Assert.That(turn2Message, Is.Not.Null); Assert.That(turn2Message!.Role, Is.EqualTo(MessageRole.Assistant)); Assert.That(turn2Message.Content, Has.Count.EqualTo(1)); @@ -970,13 +959,12 @@ public async Task FunctionCallStreamingWorks() { OpenAIResponseClient client = GetTestClient(); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) { Tools = { s_GetWeatherAtLocationTool } }; AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync( - "What should I wear for the weather in San Francisco right now?", options); int functionCallArgumentsDeltaUpdateCount = 0; @@ -1020,17 +1008,16 @@ public async Task MaxTokens() { OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( - "Write three haikus about tropical fruit", - new ResponseCreationOptions() + ResponseResult response = await client.CreateResponseAsync( + new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Write three haikus about tropical fruit")]) { - MaxOutputTokenCount = 20, + MaxOutputTokens = 20, }); Assert.That( - response?.IncompleteStatusDetails?.Reason, + response?.IncompleteDetails?.Reason, Is.EqualTo(ResponseIncompleteStatusReason.MaxOutputTokens)); - MessageResponseItem message = response?.OutputItems?.FirstOrDefault() as MessageResponseItem; ; + MessageResponseItem message = response?.Output?.FirstOrDefault() as MessageResponseItem; ; Assert.That(message?.Content?.FirstOrDefault(), Is.Not.Null); Assert.That(message?.Status, Is.EqualTo(MessageStatus.Incomplete)); } @@ -1043,21 +1030,20 @@ public async Task 
FunctionToolChoiceWorks() ResponseToolChoice toolChoice = ResponseToolChoice.CreateFunctionChoice(s_GetWeatherAtLocationToolName); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) { Tools = { s_GetWeatherAtLocationTool }, ToolChoice = toolChoice, }; - OpenAIResponse response = await client.CreateResponseAsync( - [ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")], + ResponseResult response = await client.CreateResponseAsync( options); Assert.That(response.ToolChoice, Is.Not.Null); Assert.That(response.ToolChoice.Kind, Is.EqualTo(ResponseToolChoiceKind.Function)); Assert.That(response.ToolChoice.FunctionName, Is.EqualTo(toolChoice.FunctionName)); - FunctionCallResponseItem functionCall = response.OutputItems.FirstOrDefault() as FunctionCallResponseItem; + FunctionCallResponseItem functionCall = response.Output.FirstOrDefault() as FunctionCallResponseItem; Assert.That(functionCall, Is.Not.Null); Assert.That(functionCall.FunctionName, Is.EqualTo(toolChoice.FunctionName)); } @@ -1068,12 +1054,12 @@ public async Task CanStreamBackgroundResponses() { OpenAIResponseClient client = GetTestClient("gpt-4.1-mini"); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")]) { - BackgroundModeEnabled = true, + Background = true, }; - AsyncCollectionResult updates = client.CreateResponseStreamingAsync("Hello, model!", options); + AsyncCollectionResult updates = client.CreateResponseStreamingAsync(options); string queuedResponseId = null; int lastSequenceNumber = 0; @@ -1093,17 +1079,17 @@ public async Task CanStreamBackgroundResponses() Assert.That(lastSequenceNumber, Is.GreaterThan(0)); // Try getting the response without streaming it. 
- OpenAIResponse retrievedResponse = await client.GetResponseAsync(queuedResponseId); + ResponseResult retrievedResponse = await client.GetResponseAsync(new(queuedResponseId)); Assert.That(retrievedResponse, Is.Not.Null); Assert.That(retrievedResponse.Id, Is.EqualTo(queuedResponseId)); - Assert.That(retrievedResponse.BackgroundModeEnabled, Is.True); + Assert.That(retrievedResponse.Background, Is.True); Assert.That(retrievedResponse.Status, Is.EqualTo(ResponseStatus.Queued)); // Now try continuing the stream. - AsyncCollectionResult continuedUpdates = client.GetResponseStreamingAsync(queuedResponseId, startingAfter: lastSequenceNumber); + AsyncCollectionResult continuedUpdates = client.GetResponseStreamingAsync(new(queuedResponseId) { StartingAfter = lastSequenceNumber }); - OpenAIResponse completedResponse = null; + ResponseResult completedResponse = null; int? firstContinuedSequenceNumber = null; await foreach (StreamingResponseUpdate update in continuedUpdates) @@ -1121,7 +1107,7 @@ public async Task CanStreamBackgroundResponses() Assert.That(firstContinuedSequenceNumber, Is.EqualTo(lastSequenceNumber + 1)); Assert.That(completedResponse?.Id, Is.EqualTo(queuedResponseId)); - Assert.That(completedResponse?.OutputItems?.FirstOrDefault(), Is.Not.Null); + Assert.That(completedResponse?.Output?.FirstOrDefault(), Is.Not.Null); } [RecordedTest] @@ -1129,19 +1115,19 @@ public async Task CanCancelBackgroundResponses() { OpenAIResponseClient client = GetTestClient("gpt-4.1-mini"); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")]) { - BackgroundModeEnabled = true, + Background = true, }; - OpenAIResponse response = await client.CreateResponseAsync("Hello, model!", options); + ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response, Is.Not.Null); Assert.That(response.Id, Is.Not.Null.And.Not.Empty); - Assert.That(response.BackgroundModeEnabled, Is.True); + 
Assert.That(response.Background, Is.True); Assert.That(response.Status, Is.EqualTo(ResponseStatus.Queued)); - OpenAIResponse cancelledResponse = await client.CancelResponseAsync(response.Id); + ResponseResult cancelledResponse = await client.CancelResponseAsync(response.Id); Assert.That(cancelledResponse.Id, Is.EqualTo(response.Id)); Assert.That(cancelledResponse.Status, Is.EqualTo(ResponseStatus.Cancelled)); } diff --git a/tests/Responses/ResponsesToolTests.cs b/tests/Responses/ResponsesToolTests.cs index ceef4841d..da7757ae8 100644 --- a/tests/Responses/ResponsesToolTests.cs +++ b/tests/Responses/ResponsesToolTests.cs @@ -34,7 +34,7 @@ public async Task MCPToolWorks() McpToolCallApprovalPolicy approvalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) { Tools = { new McpTool(serverLabel, serverUri) @@ -47,11 +47,11 @@ public async Task MCPToolWorks() OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); - OpenAIResponse response = await client.CreateResponseAsync("Roll 2d4+1", options); - Assert.That(response.OutputItems, Has.Count.GreaterThan(0)); + ResponseResult response = await client.CreateResponseAsync(options); + Assert.That(response.Output, Has.Count.GreaterThan(0)); // Check tool list. - List toolDefinitionListItems = response.OutputItems.OfType().ToList(); + List toolDefinitionListItems = response.Output.OfType().ToList(); Assert.That(toolDefinitionListItems, Has.Count.EqualTo(1)); McpToolDefinitionListItem listItem = toolDefinitionListItems[0]; @@ -63,7 +63,7 @@ public async Task MCPToolWorks() Assert.That(rollToolDefinition.Annotations, Is.Not.Null); // Check tool call. 
- List toolCallItems = response.OutputItems.OfType().ToList(); + List toolCallItems = response.Output.OfType().ToList(); Assert.That(toolCallItems, Has.Count.EqualTo(1)); McpToolCallItem toolCallItem = toolCallItems[0]; @@ -74,7 +74,7 @@ public async Task MCPToolWorks() Assert.That(toolCallItem.Error, Is.Null); // Check assistant message. - MessageResponseItem assistantMessageItem = response.OutputItems.Last() as MessageResponseItem; + MessageResponseItem assistantMessageItem = response.Output.Last() as MessageResponseItem; Assert.That(assistantMessageItem, Is.Not.Null); } @@ -86,7 +86,7 @@ public async Task MCPToolStreamingWorks() McpToolCallApprovalPolicy approvalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) { Tools = { new McpTool(serverLabel, serverUri) @@ -99,7 +99,7 @@ public async Task MCPToolStreamingWorks() OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); - AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync("Roll 2d4+1", options); + AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(options); int mcpCallArgumentsDeltaUpdateCount = 0; int mcpCallArgumentsDoneUpdateCount = 0; @@ -200,7 +200,7 @@ public async Task MCPToolNeverRequiresApproval(bool useGlobalPolicy) } }); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) { Tools = { new McpTool(serverLabel, serverUri) @@ -213,13 +213,13 @@ public async Task MCPToolNeverRequiresApproval(bool useGlobalPolicy) OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); - OpenAIResponse response = await client.CreateResponseAsync("Roll 2d4+1", options); - Assert.That(response.OutputItems, Has.Count.GreaterThan(0)); - Assert.That(response.OutputItems.OfType().ToList(), 
Has.Count.EqualTo(1)); + ResponseResult response = await client.CreateResponseAsync(options); + Assert.That(response.Output, Has.Count.GreaterThan(0)); + Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(1)); // Confirm there are no approval requests and that the tool was called. - Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); - Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(0)); + Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(1)); } [RecordedTest] @@ -241,7 +241,7 @@ public async Task MCPToolAlwaysRequiresApproval(bool useGlobalPolicy) } }); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) { Tools = { new McpTool(serverLabel, serverUri) @@ -254,22 +254,24 @@ public async Task MCPToolAlwaysRequiresApproval(bool useGlobalPolicy) OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); - OpenAIResponse response1 = await client.CreateResponseAsync("Roll 2d4+1", options); - Assert.That(response1.OutputItems, Has.Count.GreaterThan(0)); - Assert.That(response1.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); - Assert.That(response1.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); + ResponseResult response1 = await client.CreateResponseAsync(options); + Assert.That(response1.Output, Has.Count.GreaterThan(0)); + Assert.That(response1.Output.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response1.Output.OfType().ToList(), Has.Count.EqualTo(0)); // Check that it stopped at the approval request. - McpToolCallApprovalRequestItem approvalRequestItem = response1.OutputItems.Last() as McpToolCallApprovalRequestItem; + McpToolCallApprovalRequestItem approvalRequestItem = response1.Output.Last() as McpToolCallApprovalRequestItem; Assert.That(approvalRequestItem, Is.Not.Null); // Prepare the response. 
McpToolCallApprovalResponseItem approvalResponseItem = new(approvalRequestItem.Id, true); options.PreviousResponseId = response1.Id; + options.Input.Clear(); + options.Input.Add(approvalResponseItem); - OpenAIResponse response2 = await client.CreateResponseAsync([approvalResponseItem], options); - Assert.That(response2.OutputItems, Has.Count.GreaterThan(0)); - Assert.That(response2.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); + ResponseResult response2 = await client.CreateResponseAsync(options); + Assert.That(response2.Output, Has.Count.GreaterThan(0)); + Assert.That(response2.Output.OfType().ToList(), Has.Count.EqualTo(1)); } [RecordedTest] @@ -280,7 +282,7 @@ public async Task MCPToolWithAllowedTools() McpToolCallApprovalPolicy approvalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) { Tools = { new McpTool(serverLabel, serverUri) @@ -297,12 +299,12 @@ public async Task MCPToolWithAllowedTools() OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); - OpenAIResponse response = await client.CreateResponseAsync("Roll 2d4+1", options); - Assert.That(response.OutputItems, Has.Count.GreaterThan(0)); - Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); - Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); + ResponseResult response = await client.CreateResponseAsync(options); + Assert.That(response.Output, Has.Count.GreaterThan(0)); + Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(0)); - List toolCallItems = response.OutputItems.OfType().ToList(); + List toolCallItems = response.Output.OfType().ToList(); Assert.That(toolCallItems, Has.Count.EqualTo(1)); McpToolCallItem toolCallItem = toolCallItems[0]; @@ -321,7 +323,7 @@ public async Task 
MCPToolWithDisallowedTools() McpToolCallApprovalPolicy approvalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval); - ResponseCreationOptions options = new() + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) { Tools = { new McpTool(serverLabel, serverUri) @@ -338,11 +340,11 @@ public async Task MCPToolWithDisallowedTools() OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); - OpenAIResponse response = await client.CreateResponseAsync("Roll 2d4+1", options); - Assert.That(response.OutputItems, Has.Count.GreaterThan(0)); - Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); - Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); - Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); + ResponseResult response = await client.CreateResponseAsync(options); + Assert.That(response.Output, Has.Count.GreaterThan(0)); + Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(0)); + Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(0)); } [RecordedTest] @@ -367,21 +369,20 @@ public async Task FileSearch() OpenAIResponseClient client = GetTestClient(); - OpenAIResponse response = await client.CreateResponseAsync( - "Using the file search tool, what's Travis's favorite food?", - new ResponseCreationOptions() + ResponseResult response = await client.CreateResponseAsync( + new([ResponseItem.CreateUserMessageItem("Using the file search tool, what's Travis's favorite food?")]) { Tools = { ResponseTool.CreateFileSearchTool(vectorStoreIds: [vectorStore.Id]), } }); - Assert.That(response.OutputItems?.Count, Is.EqualTo(2)); - FileSearchCallResponseItem fileSearchCall = response.OutputItems[0] as FileSearchCallResponseItem; + Assert.That(response.Output?.Count, Is.EqualTo(2)); + FileSearchCallResponseItem fileSearchCall = 
response.Output[0] as FileSearchCallResponseItem; Assert.That(fileSearchCall, Is.Not.Null); Assert.That(fileSearchCall?.Status, Is.EqualTo(FileSearchCallStatus.Completed)); Assert.That(fileSearchCall?.Queries, Has.Count.GreaterThan(0)); - MessageResponseItem message = response.OutputItems[1] as MessageResponseItem; + MessageResponseItem message = response.Output[1] as MessageResponseItem; Assert.That(message, Is.Not.Null); ResponseContentPart messageContentPart = message.Content?.FirstOrDefault(); Assert.That(messageContentPart, Is.Not.Null); @@ -403,21 +404,20 @@ public async Task CodeInterpreterToolWithoutFileIds() OpenAIResponseClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration())); - ResponseCreationOptions responseOptions = new() + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code.")]) { Tools = { codeInterpreterTool }, }; - OpenAIResponse response = await client.CreateResponseAsync( - "Calculate the factorial of 5 using Python code.", + ResponseResult response = await client.CreateResponseAsync( responseOptions); Assert.That(response, Is.Not.Null); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That(response.OutputItems[1], Is.InstanceOf()); + Assert.That(response.Output, Has.Count.EqualTo(2)); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That(response.Output[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; + MessageResponseItem message = (MessageResponseItem)response.Output[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, 
Is.Not.Null.And.Not.Empty); @@ -434,22 +434,21 @@ public async Task CodeInterpreterToolWithEmptyFileIds() OpenAIResponseClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new(new AutomaticCodeInterpreterToolContainerConfiguration())); - ResponseCreationOptions responseOptions = new() + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Generate a simple chart using matplotlib. Ensure you emit debug logging and include any resulting log file output.")]) { Tools = { codeInterpreterTool }, }; - OpenAIResponse response = await client.CreateResponseAsync( - "Generate a simple chart using matplotlib. Ensure you emit debug logging and include any resulting log file output.", + ResponseResult response = await client.CreateResponseAsync( responseOptions); Assert.That(response, Is.Not.Null); Assert.That(response, Is.Not.Null); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That(response.OutputItems[1], Is.InstanceOf()); + Assert.That(response.Output, Has.Count.EqualTo(2)); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That(response.Output[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; + MessageResponseItem message = (MessageResponseItem)response.Output[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -478,21 +477,20 @@ public async Task CodeInterpreterToolWithContainerIdFromContainerApi() { // Create CodeInterpreter tool with the container ID ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new(containerId)); - ResponseCreationOptions responseOptions = new() + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the 
factorial of 5 using Python code.")]) { Tools = { codeInterpreterTool }, }; - OpenAIResponse response = await client.CreateResponseAsync( - "Calculate the factorial of 5 using Python code.", + ResponseResult response = await client.CreateResponseAsync( responseOptions); Assert.That(response, Is.Not.Null); - Assert.That(response.OutputItems, Has.Count.EqualTo(2)); - Assert.That(response.OutputItems[0], Is.InstanceOf()); - Assert.That(response.OutputItems[1], Is.InstanceOf()); + Assert.That(response.Output, Has.Count.EqualTo(2)); + Assert.That(response.Output[0], Is.InstanceOf()); + Assert.That(response.Output[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; + MessageResponseItem message = (MessageResponseItem)response.Output[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -544,17 +542,16 @@ public async Task CodeInterpreterToolWithUploadedFileIds() // Create CodeInterpreter tool with uploaded file IDs ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration(fileIds))); - ResponseCreationOptions responseOptions = new() + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Analyze the CSV data in the uploaded file and create a simple visualization. Also run the Python script that was uploaded.")]) { Tools = { codeInterpreterTool }, }; - OpenAIResponse response = await client.CreateResponseAsync( - "Analyze the CSV data in the uploaded file and create a simple visualization. 
Also run the Python script that was uploaded.", + ResponseResult response = await client.CreateResponseAsync( responseOptions); Assert.That(response, Is.Not.Null); - Assert.That(response.OutputItems, Is.Not.Null.And.Not.Empty); + Assert.That(response.Output, Is.Not.Null.And.Not.Empty); // Basic validation that the response was created successfully Assert.That(response.Id, Is.Not.Null.And.Not.Empty); @@ -585,13 +582,11 @@ public async Task CodeInterpreterToolStreaming() OpenAIResponseClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(new AutomaticCodeInterpreterToolContainerConfiguration())); - ResponseCreationOptions responseOptions = new() + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code and show me the code step by step.")]) { Tools = { codeInterpreterTool }, }; - const string message = "Calculate the factorial of 5 using Python code and show me the code step by step."; - int inProgressCount = 0; int interpretingCount = 0; int codeDeltaCount = 0; @@ -601,7 +596,7 @@ public async Task CodeInterpreterToolStreaming() StringBuilder codeBuilder = new StringBuilder(); await foreach (StreamingResponseUpdate update - in client.CreateResponseStreamingAsync(message, responseOptions)) + in client.CreateResponseStreamingAsync(responseOptions)) { ValidateCodeInterpreterEvent(ref inProgressCount, ref interpretingCount, ref codeDeltaCount, ref codeDoneCount, ref completedCount, ref gotFinishedCodeInterpreterItem, codeBuilder, update); } @@ -634,13 +629,11 @@ public async Task CodeInterpreterToolStreamingWithFiles() // Create CodeInterpreter tool with uploaded file IDs ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration(fileIds))); - ResponseCreationOptions responseOptions = 
new() + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Load the CSV file and create a simple plot visualization showing the relationship between x and y values.")]) { Tools = { codeInterpreterTool }, }; - const string message = "Load the CSV file and create a simple plot visualization showing the relationship between x and y values."; - int inProgressCount = 0; int interpretingCount = 0; int codeDeltaCount = 0; @@ -650,7 +643,7 @@ public async Task CodeInterpreterToolStreamingWithFiles() StringBuilder codeBuilder = new StringBuilder(); await foreach (StreamingResponseUpdate update - in client.CreateResponseStreamingAsync(message, responseOptions)) + in client.CreateResponseStreamingAsync(responseOptions)) { ValidateCodeInterpreterEvent(ref inProgressCount, ref interpretingCount, ref codeDeltaCount, ref codeDoneCount, ref completedCount, ref gotFinishedCodeInterpreterItem, codeBuilder, update); } From df1aab0a0f3bcead3d8e1a248ab254b9f586ccea Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Mon, 10 Nov 2025 15:13:16 -0600 Subject: [PATCH 07/15] hide convenience APIs --- api/OpenAI.net8.0.cs | 105 +++--------------- api/OpenAI.netstandard2.0.cs | 101 +++-------------- src/Custom/Responses/CreateResponseOptions.cs | 10 +- src/Custom/Responses/OpenAIResponseClient.cs | 33 ++++-- src/Custom/Responses/ResponseResult.cs | 9 ++ tests/Responses/ResponsesTests.cs | 17 +-- ...ionMethodsWork(DeveloperMessage)Async.json | 78 +++++++------ ...ethodsWork(InstructionsProperty)Async.json | 68 +++++++----- ...uctionMethodsWork(SystemMessage)Async.json | 66 ++++++----- .../CanDeleteResponseAsync.json | 74 ++++++------ .../CanOptOutOfStorageAsync.json | 38 ++++--- .../FunctionCallStreamingWorksAsync.json | 42 +++---- .../ResponsesTests/WebSearchCallAsync.json | 61 +++++----- 13 files changed, 320 insertions(+), 382 deletions(-) diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs index 319d415c3..5fa9b2204 100644 --- a/api/OpenAI.net8.0.cs 
+++ b/api/OpenAI.net8.0.cs @@ -5077,7 +5077,6 @@ public class CreateResponseOptions : IJsonModel, IPersist public float? TopP { get; set; } public ResponseTruncationMode? Truncation { get; set; } public string User { get; set; } - public static CreateResponseOptions Create(IEnumerable inputItems, OpenAIResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false); protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions); @@ -5576,41 +5575,6 @@ public enum MessageStatus { Incomplete = 2 } [Experimental("OPENAI001")] - public class OpenAIResponse : IJsonModel, IPersistableModel { - public bool? BackgroundModeEnabled { get; } - public DateTimeOffset CreatedAt { get; } - public string EndUserId { get; } - public ResponseError Error { get; } - public string Id { get; } - public ResponseIncompleteStatusDetails IncompleteStatusDetails { get; } - public string Instructions { get; } - public int? MaxOutputTokenCount { get; } - public IDictionary Metadata { get; } - public string Model { get; } - public IList OutputItems { get; } - public bool ParallelToolCallsEnabled { get; } - [EditorBrowsable(EditorBrowsableState.Never)] - [Experimental("SCME0001")] - public ref JsonPatch Patch { get; } - public string PreviousResponseId { get; } - public ResponseReasoningOptions ReasoningOptions { get; } - public ResponseServiceTier? ServiceTier { get; } - public ResponseStatus? Status { get; } - public float? Temperature { get; } - public ResponseTextOptions TextOptions { get; } - public ResponseToolChoice ToolChoice { get; } - public IList Tools { get; } - public float? TopP { get; } - public ResponseTruncationMode? 
TruncationMode { get; } - public ResponseTokenUsage Usage { get; } - public string GetOutputText(); - protected virtual OpenAIResponse JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); - public static explicit operator OpenAIResponse(ClientResult result); - protected virtual OpenAIResponse PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); - } - [Experimental("OPENAI001")] public class OpenAIResponseClient { protected OpenAIResponseClient(); protected internal OpenAIResponseClient(ClientPipeline pipeline, string model, OpenAIClientOptions options); @@ -5620,53 +5584,40 @@ public class OpenAIResponseClient { public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy); public OpenAIResponseClient(string model, string apiKey); [Experimental("OPENAI001")] - public Uri Endpoint { get; } + public virtual Uri Endpoint { get; } [Experimental("OPENAI001")] - public string Model { get; } + public virtual string Model { get; } public ClientPipeline Pipeline { get; } public virtual ClientResult CancelResponse(string responseId, RequestOptions options); - public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default); public virtual Task CancelResponseAsync(string responseId, RequestOptions options); - public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default); public 
virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); - public virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); - public virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual Task> CreateResponseAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult CreateResponseStreaming(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); public 
virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); - public virtual ClientResult GetResponse(string responseId, CancellationToken cancellationToken = default); - public Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options); - public virtual Task> GetResponseAsync(string responseId, CancellationToken cancellationToken = default); public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, int? 
limit, string order, string after, string before, RequestOptions options); - public virtual Task GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); + public virtual Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseStreaming(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default); - public AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); } [Experimental("OPENAI001")] public static class OpenAIResponsesModelFactory { public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null); - public static OpenAIResponse OpenAIResponse(string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, ResponseError error = null, ResponseTokenUsage usage = null, string endUserId = null, ResponseReasoningOptions reasoningOptions = null, int? 
maxOutputTokenCount = null, ResponseTextOptions textOptions = null, ResponseTruncationMode? truncationMode = null, ResponseIncompleteStatusDetails incompleteStatusDetails = null, IEnumerable outputItems = null, bool parallelToolCallsEnabled = false, ResponseToolChoice toolChoice = null, string model = null, IDictionary metadata = null, float? temperature = null, float? topP = null, ResponseServiceTier? serviceTier = null, string previousResponseId = null, bool? backgroundModeEnabled = null, string instructions = null, IEnumerable tools = null); public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, IEnumerable summaryParts = null); public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, string summaryText = null); public static ReferenceResponseItem ReferenceResponseItem(string id = null); @@ -5756,32 +5707,6 @@ public enum ResponseContentPartKind { Refusal = 5 } [Experimental("OPENAI001")] - public class ResponseCreationOptions : IJsonModel, IPersistableModel { - public bool? BackgroundModeEnabled { get; set; } - public string EndUserId { get; set; } - public string Instructions { get; set; } - public int? MaxOutputTokenCount { get; set; } - public IDictionary Metadata { get; } - public bool? ParallelToolCallsEnabled { get; set; } - [EditorBrowsable(EditorBrowsableState.Never)] - [Experimental("SCME0001")] - public ref JsonPatch Patch { get; } - public string PreviousResponseId { get; set; } - public ResponseReasoningOptions ReasoningOptions { get; set; } - public ResponseServiceTier? ServiceTier { get; set; } - public bool? StoredOutputEnabled { get; set; } - public float? Temperature { get; set; } - public ResponseTextOptions TextOptions { get; set; } - public ResponseToolChoice ToolChoice { get; set; } - public IList Tools { get; } - public float? 
TopP { get; set; } - public ResponseTruncationMode? TruncationMode { get; set; } - protected virtual ResponseCreationOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); - protected virtual ResponseCreationOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); - } - [Experimental("OPENAI001")] public class ResponseDeletionResult : IJsonModel, IPersistableModel { public bool Deleted { get; } public string Id { get; } @@ -6060,6 +5985,7 @@ public class ResponseResult : IJsonModel, IPersistableModel Metadata { get; } + public string Model { get; } public string Object { get; } public IList Output { get; } public string OutputText { get; } @@ -6073,12 +5999,13 @@ public class ResponseResult : IJsonModel, IPersistableModel Tools { get; } public float? TopP { get; } public ResponseTruncationMode? 
Truncation { get; } public ResponseTokenUsage Usage { get; } public string User { get; } + public string GetOutputText(); protected virtual ResponseResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); public static explicit operator ResponseResult(BinaryData data); @@ -6268,7 +6195,7 @@ public class StreamingResponseCodeInterpreterCallInterpretingUpdate : StreamingR } [Experimental("OPENAI001")] public class StreamingResponseCompletedUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -6298,7 +6225,7 @@ public class StreamingResponseContentPartDoneUpdate : StreamingResponseUpdate, I } [Experimental("OPENAI001")] public class StreamingResponseCreatedUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -6316,7 +6243,7 @@ public class StreamingResponseErrorUpdate : StreamingResponseUpdate, IJsonModel< } [Experimental("OPENAI001")] public class StreamingResponseFailedUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse 
Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -6409,7 +6336,7 @@ public class StreamingResponseImageGenerationCallPartialImageUpdate : StreamingR } [Experimental("OPENAI001")] public class StreamingResponseIncompleteUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -6417,7 +6344,7 @@ public class StreamingResponseIncompleteUpdate : StreamingResponseUpdate, IJsonM } [Experimental("OPENAI001")] public class StreamingResponseInProgressUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -6539,7 +6466,7 @@ public class StreamingResponseOutputTextDoneUpdate : StreamingResponseUpdate, IJ } [Experimental("OPENAI001")] public class StreamingResponseQueuedUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public 
OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs index dac09b81d..e49a3d5a5 100644 --- a/api/OpenAI.netstandard2.0.cs +++ b/api/OpenAI.netstandard2.0.cs @@ -4438,7 +4438,6 @@ public class CreateResponseOptions : IJsonModel, IPersist public float? TopP { get; set; } public ResponseTruncationMode? Truncation { get; set; } public string User { get; set; } - public static CreateResponseOptions Create(IEnumerable inputItems, OpenAIResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false); protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions); @@ -4891,39 +4890,6 @@ public enum MessageStatus { Completed = 1, Incomplete = 2 } - public class OpenAIResponse : IJsonModel, IPersistableModel { - public bool? BackgroundModeEnabled { get; } - public DateTimeOffset CreatedAt { get; } - public string EndUserId { get; } - public ResponseError Error { get; } - public string Id { get; } - public ResponseIncompleteStatusDetails IncompleteStatusDetails { get; } - public string Instructions { get; } - public int? 
MaxOutputTokenCount { get; } - public IDictionary Metadata { get; } - public string Model { get; } - public IList OutputItems { get; } - public bool ParallelToolCallsEnabled { get; } - [EditorBrowsable(EditorBrowsableState.Never)] - public ref JsonPatch Patch { get; } - public string PreviousResponseId { get; } - public ResponseReasoningOptions ReasoningOptions { get; } - public ResponseServiceTier? ServiceTier { get; } - public ResponseStatus? Status { get; } - public float? Temperature { get; } - public ResponseTextOptions TextOptions { get; } - public ResponseToolChoice ToolChoice { get; } - public IList Tools { get; } - public float? TopP { get; } - public ResponseTruncationMode? TruncationMode { get; } - public ResponseTokenUsage Usage { get; } - public string GetOutputText(); - protected virtual OpenAIResponse JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); - public static explicit operator OpenAIResponse(ClientResult result); - protected virtual OpenAIResponse PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); - } public class OpenAIResponseClient { protected OpenAIResponseClient(); protected internal OpenAIResponseClient(ClientPipeline pipeline, string model, OpenAIClientOptions options); @@ -4932,51 +4898,38 @@ public class OpenAIResponseClient { public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options); public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy); public OpenAIResponseClient(string model, string apiKey); - public Uri Endpoint { get; } - public string Model { get; } + public virtual Uri Endpoint { get; } + public virtual string Model { get; } public ClientPipeline Pipeline { get; } public virtual ClientResult 
CancelResponse(string responseId, RequestOptions options); - public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default); public virtual Task CancelResponseAsync(string responseId, RequestOptions options); - public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); - public virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); public virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); - public virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual Task> CreateResponseAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult CreateResponseStreaming(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult 
CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); - public virtual ClientResult GetResponse(string responseId, CancellationToken cancellationToken = default); - public Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual Task GetResponseAsync(string responseId, bool? stream, int? 
startingAfter, RequestOptions options); - public virtual Task> GetResponseAsync(string responseId, CancellationToken cancellationToken = default); public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, int? limit, string order, string after, string before, RequestOptions options); - public virtual Task GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); + public virtual Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseStreaming(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default); - public AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, int? 
startingAfter = null, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); } public static class OpenAIResponsesModelFactory { public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null); - public static OpenAIResponse OpenAIResponse(string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, ResponseError error = null, ResponseTokenUsage usage = null, string endUserId = null, ResponseReasoningOptions reasoningOptions = null, int? maxOutputTokenCount = null, ResponseTextOptions textOptions = null, ResponseTruncationMode? truncationMode = null, ResponseIncompleteStatusDetails incompleteStatusDetails = null, IEnumerable outputItems = null, bool parallelToolCallsEnabled = false, ResponseToolChoice toolChoice = null, string model = null, IDictionary metadata = null, float? temperature = null, float? topP = null, ResponseServiceTier? serviceTier = null, string previousResponseId = null, bool? backgroundModeEnabled = null, string instructions = null, IEnumerable tools = null); public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, IEnumerable summaryParts = null); public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, string summaryText = null); public static ReferenceResponseItem ReferenceResponseItem(string id = null); @@ -5056,30 +5009,6 @@ public enum ResponseContentPartKind { OutputText = 4, Refusal = 5 } - public class ResponseCreationOptions : IJsonModel, IPersistableModel { - public bool? BackgroundModeEnabled { get; set; } - public string EndUserId { get; set; } - public string Instructions { get; set; } - public int? 
MaxOutputTokenCount { get; set; } - public IDictionary Metadata { get; } - public bool? ParallelToolCallsEnabled { get; set; } - [EditorBrowsable(EditorBrowsableState.Never)] - public ref JsonPatch Patch { get; } - public string PreviousResponseId { get; set; } - public ResponseReasoningOptions ReasoningOptions { get; set; } - public ResponseServiceTier? ServiceTier { get; set; } - public bool? StoredOutputEnabled { get; set; } - public float? Temperature { get; set; } - public ResponseTextOptions TextOptions { get; set; } - public ResponseToolChoice ToolChoice { get; set; } - public IList Tools { get; } - public float? TopP { get; set; } - public ResponseTruncationMode? TruncationMode { get; set; } - protected virtual ResponseCreationOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); - protected virtual ResponseCreationOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); - } public class ResponseDeletionResult : IJsonModel, IPersistableModel { public bool Deleted { get; } public string Id { get; } @@ -5329,6 +5258,7 @@ public class ResponseResult : IJsonModel, IPersistableModel Metadata { get; } + public string Model { get; } public string Object { get; } public IList Output { get; } public string OutputText { get; } @@ -5341,12 +5271,13 @@ public class ResponseResult : IJsonModel, IPersistableModel Tools { get; } public float? TopP { get; } public ResponseTruncationMode? 
Truncation { get; } public ResponseTokenUsage Usage { get; } public string User { get; } + public string GetOutputText(); protected virtual ResponseResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); public static explicit operator ResponseResult(BinaryData data); @@ -5514,7 +5445,7 @@ public class StreamingResponseCodeInterpreterCallInterpretingUpdate : StreamingR protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class StreamingResponseCompletedUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -5541,7 +5472,7 @@ public class StreamingResponseContentPartDoneUpdate : StreamingResponseUpdate, I protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class StreamingResponseCreatedUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -5557,7 +5488,7 @@ public class StreamingResponseErrorUpdate : StreamingResponseUpdate, IJsonModel< protected override BinaryData 
PersistableModelWriteCore(ModelReaderWriterOptions options); } public class StreamingResponseFailedUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -5640,14 +5571,14 @@ public class StreamingResponseImageGenerationCallPartialImageUpdate : StreamingR protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class StreamingResponseIncompleteUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class StreamingResponseInProgressUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); @@ -5756,7 +5687,7 @@ public class 
StreamingResponseOutputTextDoneUpdate : StreamingResponseUpdate, IJ protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class StreamingResponseQueuedUpdate : StreamingResponseUpdate, IJsonModel, IPersistableModel { - public OpenAIResponse Response { get; } + public ResponseResult Response { get; } protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); diff --git a/src/Custom/Responses/CreateResponseOptions.cs b/src/Custom/Responses/CreateResponseOptions.cs index e5479427d..f202c6c87 100644 --- a/src/Custom/Responses/CreateResponseOptions.cs +++ b/src/Custom/Responses/CreateResponseOptions.cs @@ -86,7 +86,7 @@ internal CreateResponseOptions(IDictionary metadata, float? temp public ResponseTruncationMode? Truncation { get; set; } - public IList Input { get; } + public IList Input { get; internal set; } public IList Include { get; set; } @@ -125,5 +125,13 @@ [.. responseCreationOptions.Include.Select(x => x.ToIncludable())], responseCreationOptions.Stream, new JsonPatch()); } + + internal CreateResponseOptions GetClone() + { + CreateResponseOptions copiedOptions = (CreateResponseOptions)this.MemberwiseClone(); + copiedOptions.Patch = _patch; + + return copiedOptions; + } } } diff --git a/src/Custom/Responses/OpenAIResponseClient.cs b/src/Custom/Responses/OpenAIResponseClient.cs index aa4568f2c..d3eb0af45 100644 --- a/src/Custom/Responses/OpenAIResponseClient.cs +++ b/src/Custom/Responses/OpenAIResponseClient.cs @@ -117,13 +117,13 @@ protected internal OpenAIResponseClient(ClientPipeline pipeline, string model, O /// Gets the endpoint URI for the service. 
/// [Experimental("OPENAI001")] - public Uri Endpoint => _endpoint; + public virtual Uri Endpoint => _endpoint; /// /// Gets the name of the model used in requests sent to the service. /// [Experimental("OPENAI001")] - public string Model => _model; + public virtual string Model => _model; internal virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { @@ -180,7 +180,7 @@ public virtual ClientResult CreateResponse(CreateResponseOptions { Argument.AssertNotNull(options, nameof(options)); - ClientResult result = this.CreateResponse(options, cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null); + ClientResult result = this.CreateResponse(CreatePerCallOptions(options), cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null); return ClientResult.FromValue((ResponseResult)result.GetRawResponse().Content, result.GetRawResponse()); } @@ -188,7 +188,7 @@ public virtual async Task> CreateResponseAsync(Crea { Argument.AssertNotNull(options, nameof(options)); - ClientResult result = await this.CreateResponseAsync(options, cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null).ConfigureAwait(false); + ClientResult result = await this.CreateResponseAsync(CreatePerCallOptions(options), cancellationToken.CanBeCanceled ? 
new RequestOptions { CancellationToken = cancellationToken } : null).ConfigureAwait(false); return ClientResult.FromValue((ResponseResult)result.GetRawResponse().Content, result.GetRawResponse()); } @@ -215,7 +215,7 @@ internal AsyncCollectionResult CreateResponseStreamingA public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default) { - return CreateResponseStreamingAsync(options, cancellationToken.ToRequestOptions(streaming: true)); + return CreateResponseStreamingAsync(CreatePerCallOptions(options, true), cancellationToken.ToRequestOptions(streaming: true)); } internal AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, RequestOptions requestOptions) @@ -249,7 +249,7 @@ public virtual CollectionResult CreateResponseStreaming Argument.AssertNotNull(options, nameof(options)); return new SseUpdateCollection( - () => CreateResponse(options, cancellationToken.ToRequestOptions(streaming: true)), + () => CreateResponse(CreatePerCallOptions(options, true), cancellationToken.ToRequestOptions(streaming: true)), StreamingResponseUpdate.DeserializeStreamingResponseUpdate, cancellationToken); } @@ -438,7 +438,26 @@ internal virtual ResponseCreationOptions CreatePerCallOptions(ResponseCreationOp : userOptions.GetClone(); copiedOptions.Input = inputItems.ToList(); - copiedOptions.Model = _model; + copiedOptions.Model = Model; + + if (stream) + { + copiedOptions.Stream = true; + } + + return copiedOptions; + } + + internal virtual CreateResponseOptions CreatePerCallOptions(CreateResponseOptions userOptions, bool stream = false) + { + CreateResponseOptions copiedOptions = userOptions is null + ? 
new() + : userOptions.GetClone(); + + if (copiedOptions.Model is null) + { + copiedOptions.Model = Model; + } if (stream) { diff --git a/src/Custom/Responses/ResponseResult.cs b/src/Custom/Responses/ResponseResult.cs index 63b07545c..af0f79c7e 100644 --- a/src/Custom/Responses/ResponseResult.cs +++ b/src/Custom/Responses/ResponseResult.cs @@ -118,5 +118,14 @@ internal ResponseResult(IDictionary metadata, float? temperature public ResponseTokenUsage Usage { get; } public bool ParallelToolCalls { get; } + + public string GetOutputText() + { + IEnumerable outputTextSegments = Output.Where(item => item is InternalResponsesAssistantMessage) + .Select(item => item as InternalResponsesAssistantMessage) + .SelectMany(message => message.Content.Where(contentPart => contentPart.Kind == ResponseContentPartKind.OutputText) + .Select(outputTextPart => outputTextPart.Text)); + return outputTextSegments.Any() ? string.Concat(outputTextSegments) : null; + } } } diff --git a/tests/Responses/ResponsesTests.cs b/tests/Responses/ResponsesTests.cs index 141ea3711..c9e612fd0 100644 --- a/tests/Responses/ResponsesTests.cs +++ b/tests/Responses/ResponsesTests.cs @@ -488,7 +488,9 @@ public async Task StreamingResponses() } else if (update is StreamingResponseCompletedUpdate responseCompletedUpdate) { - finalResponseText = responseCompletedUpdate.Response.OutputText; + finalResponseText = responseCompletedUpdate.Response.Output[0] is MessageResponseItem messageItem + ? 
messageItem.Content[0].Text + : null; } } Assert.That(deltaTextSegments, Has.Count.GreaterThan(0)); @@ -664,12 +666,13 @@ public async Task OutputTextMethod() { OpenAIResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( - new ([ResponseItem.CreateUserMessageItem("Respond with only the word hello.")])); - Assert.That(response?.OutputText?.Length, Is.GreaterThan(0).And.LessThan(7)); - Assert.That(response?.OutputText?.ToLower(), Does.Contain("hello")); + new([ResponseItem.CreateUserMessageItem("Respond with only the word hello.")])); + var outputText = response.GetOutputText(); + Assert.That(outputText.Length, Is.GreaterThan(0).And.LessThan(7)); + Assert.That(outputText.ToLower(), Does.Contain("hello")); response.Output.Add(ResponseItem.CreateAssistantMessageItem("More text!")); - Assert.That(response?.OutputText?.ToLower(), Does.EndWith("more text!")); + Assert.That(response.GetOutputText().ToLower(), Does.EndWith("more text!")); response = await client.CreateResponseAsync( new ([ResponseItem.CreateUserMessageItem("How's the weather?")]) @@ -751,7 +754,7 @@ public async Task FileInputFromIdWorks() ResponseResult response = await client.CreateResponseAsync(new([messageItem])); - Assert.That(response?.OutputText?.ToLower(), Does.Contain("pizza")); + Assert.That(response?.GetOutputText().ToLower(), Does.Contain("pizza")); } [RecordedTest] @@ -771,7 +774,7 @@ public async Task FileInputFromBinaryWorks() ResponseResult response = await client.CreateResponseAsync(new([messageItem])); - Assert.That(response?.OutputText?.ToLower(), Does.Contain("pizza")); + Assert.That(response?.GetOutputText(), Does.Contain("pizza")); } [RecordedTest] diff --git a/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(DeveloperMessage)Async.json b/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(DeveloperMessage)Async.json index 936e98cd0..ecc9b0e22 100644 --- 
a/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(DeveloperMessage)Async.json +++ b/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(DeveloperMessage)Async.json @@ -8,7 +8,7 @@ "Authorization": "Sanitized", "Content-Length": "248", "Content-Type": "application/json", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": { "model": "gpt-4o-mini", @@ -39,19 +39,19 @@ "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "9804101109e7ba46-SEA", + "CF-RAY": "99c8407c7feb2c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1402", + "Content-Length": "1487", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:55 GMT", + "Date": "Mon, 10 Nov 2025 20:20:43 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "769", + "openai-processing-ms": "1532", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "772", + "x-envoy-upstream-service-time": "1539", "x-ratelimit-limit-requests": "30000", "x-ratelimit-limit-tokens": "150000000", "x-ratelimit-remaining-requests": "29999", @@ -61,11 +61,14 @@ "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_00d0e225bdd1d9770068c9ef6e4cf88196a42768ef39282d67", + "id": "resp_002dcc4e4a926cdc006912491986b4819dad1389a617089eb9", "object": "response", - "created_at": 1758064494, + "created_at": 1762806041, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": null, @@ -74,7 +77,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": 
"msg_00d0e225bdd1d9770068c9ef6ea2fc81969eb0316f21bc531e", + "id": "msg_002dcc4e4a926cdc006912491a834c819dad81a02dc2376e19", "type": "message", "status": "completed", "content": [ @@ -82,7 +85,7 @@ "type": "output_text", "annotations": [], "logprobs": [], - "text": "Arr, matey! How can I assist ye today?" + "text": "Arr, matey! What brings ye to this fine day?" } ], "role": "assistant" @@ -91,6 +94,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -115,50 +119,53 @@ "input_tokens_details": { "cached_tokens": 0 }, - "output_tokens": 13, + "output_tokens": 14, "output_tokens_details": { "reasoning_tokens": 0 }, - "total_tokens": 39 + "total_tokens": 40 }, "user": null, "metadata": {} } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_00d0e225bdd1d9770068c9ef6e4cf88196a42768ef39282d67", + "RequestUri": "https://api.openai.com/v1/responses/resp_002dcc4e4a926cdc006912491986b4819dad1389a617089eb9?stream=false", "RequestMethod": "GET", "RequestHeaders": { "Accept": "application/json, text/event-stream", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 200, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "980410165ee8ba46-SEA", + "CF-RAY": "99c8408969952c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1402", + "Content-Length": "1487", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:55 GMT", + "Date": "Mon, 10 Nov 2025 20:20:43 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "132", + "openai-processing-ms": "85", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": 
"cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "137", + "x-envoy-upstream-service-time": "91", "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_00d0e225bdd1d9770068c9ef6e4cf88196a42768ef39282d67", + "id": "resp_002dcc4e4a926cdc006912491986b4819dad1389a617089eb9", "object": "response", - "created_at": 1758064494, + "created_at": 1762806041, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": null, @@ -167,7 +174,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "msg_00d0e225bdd1d9770068c9ef6ea2fc81969eb0316f21bc531e", + "id": "msg_002dcc4e4a926cdc006912491a834c819dad81a02dc2376e19", "type": "message", "status": "completed", "content": [ @@ -175,7 +182,7 @@ "type": "output_text", "annotations": [], "logprobs": [], - "text": "Arr, matey! How can I assist ye today?" + "text": "Arr, matey! What brings ye to this fine day?" 
} ], "role": "assistant" @@ -184,6 +191,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -208,49 +216,49 @@ "input_tokens_details": { "cached_tokens": 0 }, - "output_tokens": 13, + "output_tokens": 14, "output_tokens_details": { "reasoning_tokens": 0 }, - "total_tokens": 39 + "total_tokens": 40 }, "user": null, "metadata": {} } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_00d0e225bdd1d9770068c9ef6e4cf88196a42768ef39282d67/input_items", + "RequestUri": "https://api.openai.com/v1/responses/resp_002dcc4e4a926cdc006912491986b4819dad1389a617089eb9/input_items", "RequestMethod": "GET", "RequestHeaders": { "Accept": "application/json", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 200, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "98041017c81cba46-SEA", + "CF-RAY": "99c8408cfe212c9d-DFW", "Connection": "keep-alive", "Content-Length": "781", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:55 GMT", + "Date": "Mon, 10 Nov 2025 20:20:43 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "131", + "openai-processing-ms": "91", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "135", + "x-envoy-upstream-service-time": "95", "X-Request-ID": "Sanitized" }, "ResponseBody": { "object": "list", "data": [ { - "id": "msg_00d0e225bdd1d9770068c9ef6e54cc8196a7520acce6f350d5", + "id": 
"msg_002dcc4e4a926cdc00691249198ac0819da9a1f2c7c651fa3b", "type": "message", "status": "completed", "content": [ @@ -262,7 +270,7 @@ "role": "user" }, { - "id": "msg_00d0e225bdd1d9770068c9ef6e54b8819683fb7d418ebd5530", + "id": "msg_002dcc4e4a926cdc00691249198aa8819d974fb5a3cbd7bf3e", "type": "message", "status": "completed", "content": [ @@ -274,9 +282,9 @@ "role": "developer" } ], - "first_id": "msg_00d0e225bdd1d9770068c9ef6e54cc8196a7520acce6f350d5", + "first_id": "msg_002dcc4e4a926cdc00691249198ac0819da9a1f2c7c651fa3b", "has_more": false, - "last_id": "msg_00d0e225bdd1d9770068c9ef6e54b8819683fb7d418ebd5530" + "last_id": "msg_002dcc4e4a926cdc00691249198aa8819d974fb5a3cbd7bf3e" } } ], diff --git a/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(InstructionsProperty)Async.json b/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(InstructionsProperty)Async.json index 4a3e2fd6a..39588a275 100644 --- a/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(InstructionsProperty)Async.json +++ b/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(InstructionsProperty)Async.json @@ -8,7 +8,7 @@ "Authorization": "Sanitized", "Content-Length": "184", "Content-Type": "application/json", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": { "model": "gpt-4o-mini", @@ -30,19 +30,23 @@ "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "98040fff7c2dba46-SEA", + "CF-RAY": "99c84047edeb2c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1433", + "Content-Length": "1512", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:52 GMT", + "Date": "Mon, 10 Nov 2025 20:20:35 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "869", + "openai-processing-ms": "2665", 
"openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", + "Set-Cookie": [ + "Sanitized", + "Sanitized" + ], "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "874", + "x-envoy-upstream-service-time": "2670", "x-ratelimit-limit-requests": "30000", "x-ratelimit-limit-tokens": "150000000", "x-ratelimit-remaining-requests": "29999", @@ -52,11 +56,14 @@ "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_0e9d546061ee22b70068c9ef6b7f348195bc56626ea1118b08", + "id": "resp_0e6c4adc7b091afb00691249110f0c819fa0a9c38e350ae71d", "object": "response", - "created_at": 1758064491, + "created_at": 1762806033, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": "Always begin your replies with 'Arr, matey'", @@ -65,7 +72,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "msg_0e9d546061ee22b70068c9ef6be8bc81958958a3cbdd2b10a9", + "id": "msg_0e6c4adc7b091afb0069124911b2a8819f9c02a1e1f9a6b901", "type": "message", "status": "completed", "content": [ @@ -82,6 +89,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -117,39 +125,42 @@ } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_0e9d546061ee22b70068c9ef6b7f348195bc56626ea1118b08", + "RequestUri": "https://api.openai.com/v1/responses/resp_0e6c4adc7b091afb00691249110f0c819fa0a9c38e350ae71d?stream=false", "RequestMethod": "GET", "RequestHeaders": { "Accept": "application/json, text/event-stream", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, 
"RequestBody": null, "StatusCode": 200, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "980410057897ba46-SEA", + "CF-RAY": "99c8405c1e092c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1433", + "Content-Length": "1512", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:52 GMT", + "Date": "Mon, 10 Nov 2025 20:20:35 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "127", + "openai-processing-ms": "52", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "131", + "x-envoy-upstream-service-time": "56", "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_0e9d546061ee22b70068c9ef6b7f348195bc56626ea1118b08", + "id": "resp_0e6c4adc7b091afb00691249110f0c819fa0a9c38e350ae71d", "object": "response", - "created_at": 1758064491, + "created_at": 1762806033, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": "Always begin your replies with 'Arr, matey'", @@ -158,7 +169,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "msg_0e9d546061ee22b70068c9ef6be8bc81958958a3cbdd2b10a9", + "id": "msg_0e6c4adc7b091afb0069124911b2a8819f9c02a1e1f9a6b901", "type": "message", "status": "completed", "content": [ @@ -175,6 +186,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -210,38 +222,38 @@ } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_0e9d546061ee22b70068c9ef6b7f348195bc56626ea1118b08/input_items", + "RequestUri": "https://api.openai.com/v1/responses/resp_0e6c4adc7b091afb00691249110f0c819fa0a9c38e350ae71d/input_items", "RequestMethod": "GET", "RequestHeaders": { 
"Accept": "application/json", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 200, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "98041006e9d2ba46-SEA", + "CF-RAY": "99c8405cff282c9d-DFW", "Connection": "keep-alive", "Content-Length": "474", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:52 GMT", + "Date": "Mon, 10 Nov 2025 20:20:36 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "99", + "openai-processing-ms": "273", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "102", + "x-envoy-upstream-service-time": "276", "X-Request-ID": "Sanitized" }, "ResponseBody": { "object": "list", "data": [ { - "id": "msg_0e9d546061ee22b70068c9ef6b8a188195b2406d8cfa05d19c", + "id": "msg_0e6c4adc7b091afb00691249111030819fa509111363c70e43", "type": "message", "status": "completed", "content": [ @@ -253,9 +265,9 @@ "role": "user" } ], - "first_id": "msg_0e9d546061ee22b70068c9ef6b8a188195b2406d8cfa05d19c", + "first_id": "msg_0e6c4adc7b091afb00691249111030819fa509111363c70e43", "has_more": false, - "last_id": "msg_0e9d546061ee22b70068c9ef6b8a188195b2406d8cfa05d19c" + "last_id": "msg_0e6c4adc7b091afb00691249111030819fa509111363c70e43" } } ], diff --git a/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(SystemMessage)Async.json b/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(SystemMessage)Async.json index 6d8a89760..7fe2ec87c 100644 --- a/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(SystemMessage)Async.json +++ 
b/tests/SessionRecords/ResponsesTests/AllInstructionMethodsWork(SystemMessage)Async.json @@ -8,7 +8,7 @@ "Authorization": "Sanitized", "Content-Length": "245", "Content-Type": "application/json", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": { "model": "gpt-4o-mini", @@ -39,19 +39,19 @@ "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "980410084af3ba46-SEA", + "CF-RAY": "99c84063cfc22c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1392", + "Content-Length": "1471", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:53 GMT", + "Date": "Mon, 10 Nov 2025 20:20:39 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "686", + "openai-processing-ms": "2310", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "689", + "x-envoy-upstream-service-time": "2312", "x-ratelimit-limit-requests": "30000", "x-ratelimit-limit-tokens": "150000000", "x-ratelimit-remaining-requests": "29999", @@ -61,11 +61,14 @@ "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_0299eba013e54bff0068c9ef6ce8cc819389aea0a5bdf849de", + "id": "resp_000d5fc0a1c5522f00691249158260819e9b956f728c617313", "object": "response", - "created_at": 1758064492, + "created_at": 1762806037, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": null, @@ -74,7 +77,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "msg_0299eba013e54bff0068c9ef6d38788193a39ece140726f24b", + "id": "msg_000d5fc0a1c5522f0069124916ff78819e892b7eb633f3854f", 
"type": "message", "status": "completed", "content": [ @@ -91,6 +94,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -126,39 +130,42 @@ } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_0299eba013e54bff0068c9ef6ce8cc819389aea0a5bdf849de", + "RequestUri": "https://api.openai.com/v1/responses/resp_000d5fc0a1c5522f00691249158260819e9b956f728c617313?stream=false", "RequestMethod": "GET", "RequestHeaders": { "Accept": "application/json, text/event-stream", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 200, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "9804100d3ee2ba46-SEA", + "CF-RAY": "99c84075cdc32c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1392", + "Content-Length": "1471", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:53 GMT", + "Date": "Mon, 10 Nov 2025 20:20:40 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "153", + "openai-processing-ms": "138", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "157", + "x-envoy-upstream-service-time": "145", "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_0299eba013e54bff0068c9ef6ce8cc819389aea0a5bdf849de", + "id": "resp_000d5fc0a1c5522f00691249158260819e9b956f728c617313", "object": "response", - "created_at": 1758064492, + "created_at": 1762806037, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, 
"error": null, "incomplete_details": null, "instructions": null, @@ -167,7 +174,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "msg_0299eba013e54bff0068c9ef6d38788193a39ece140726f24b", + "id": "msg_000d5fc0a1c5522f0069124916ff78819e892b7eb633f3854f", "type": "message", "status": "completed", "content": [ @@ -184,6 +191,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -219,38 +227,38 @@ } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_0299eba013e54bff0068c9ef6ce8cc819389aea0a5bdf849de/input_items", + "RequestUri": "https://api.openai.com/v1/responses/resp_000d5fc0a1c5522f00691249158260819e9b956f728c617313/input_items", "RequestMethod": "GET", "RequestHeaders": { "Accept": "application/json", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 200, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "9804100ed852ba46-SEA", + "CF-RAY": "99c84079abdc2c9d-DFW", "Connection": "keep-alive", "Content-Length": "778", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:14:54 GMT", + "Date": "Mon, 10 Nov 2025 20:20:40 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "190", + "openai-processing-ms": "211", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "195", + "x-envoy-upstream-service-time": "215", "X-Request-ID": "Sanitized" }, "ResponseBody": { "object": "list", "data": [ { - "id": 
"msg_0299eba013e54bff0068c9ef6ceeb48193add63cb6b6ce29ae", + "id": "msg_000d5fc0a1c5522f006912491583b0819e8262b71593b89286", "type": "message", "status": "completed", "content": [ @@ -262,7 +270,7 @@ "role": "user" }, { - "id": "msg_0299eba013e54bff0068c9ef6ceea88193b14b088d798b8068", + "id": "msg_000d5fc0a1c5522f006912491583a0819ebb582a8129a648db", "type": "message", "status": "completed", "content": [ @@ -274,9 +282,9 @@ "role": "system" } ], - "first_id": "msg_0299eba013e54bff0068c9ef6ceeb48193add63cb6b6ce29ae", + "first_id": "msg_000d5fc0a1c5522f006912491583b0819e8262b71593b89286", "has_more": false, - "last_id": "msg_0299eba013e54bff0068c9ef6ceea88193b14b088d798b8068" + "last_id": "msg_000d5fc0a1c5522f006912491583a0819ebb582a8129a648db" } } ], diff --git a/tests/SessionRecords/ResponsesTests/CanDeleteResponseAsync.json b/tests/SessionRecords/ResponsesTests/CanDeleteResponseAsync.json index 076d62209..c65a03cf7 100644 --- a/tests/SessionRecords/ResponsesTests/CanDeleteResponseAsync.json +++ b/tests/SessionRecords/ResponsesTests/CanDeleteResponseAsync.json @@ -8,7 +8,7 @@ "Authorization": "Sanitized", "Content-Length": "123", "Content-Type": "application/json", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": { "model": "gpt-4o-mini", @@ -29,19 +29,19 @@ "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "98041050d8d5ba46-SEA", + "CF-RAY": "99c840919b2a2c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1398", + "Content-Length": "1477", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:15:05 GMT", + "Date": "Mon, 10 Nov 2025 20:20:45 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "711", + "openai-processing-ms": "518", "openai-project": "Sanitized", "openai-version": 
"2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "716", + "x-envoy-upstream-service-time": "521", "x-ratelimit-limit-requests": "30000", "x-ratelimit-limit-tokens": "150000000", "x-ratelimit-remaining-requests": "29999", @@ -51,11 +51,14 @@ "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_08a30bb95515b60d0068c9ef788618819096ed807dca9a6e3e", + "id": "resp_07a1ad1767d55063006912491c79cc819cba311f57f16b0432", "object": "response", - "created_at": 1758064504, + "created_at": 1762806044, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": null, @@ -64,7 +67,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "msg_08a30bb95515b60d0068c9ef78ef0881909e7f6d3107c4372d", + "id": "msg_07a1ad1767d55063006912491cbc3c819c88d3a4450d7f11f9", "type": "message", "status": "completed", "content": [ @@ -81,6 +84,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -116,39 +120,42 @@ } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_08a30bb95515b60d0068c9ef788618819096ed807dca9a6e3e", + "RequestUri": "https://api.openai.com/v1/responses/resp_07a1ad1767d55063006912491c79cc819cba311f57f16b0432?stream=false", "RequestMethod": "GET", "RequestHeaders": { "Accept": "application/json, text/event-stream", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 200, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": 
"980410560d09ba46-SEA", + "CF-RAY": "99c840961fcf2c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1398", + "Content-Length": "1477", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:15:05 GMT", + "Date": "Mon, 10 Nov 2025 20:20:45 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "99", + "openai-processing-ms": "48", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "104", + "x-envoy-upstream-service-time": "51", "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_08a30bb95515b60d0068c9ef788618819096ed807dca9a6e3e", + "id": "resp_07a1ad1767d55063006912491c79cc819cba311f57f16b0432", "object": "response", - "created_at": 1758064504, + "created_at": 1762806044, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": null, @@ -157,7 +164,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "msg_08a30bb95515b60d0068c9ef78ef0881909e7f6d3107c4372d", + "id": "msg_07a1ad1767d55063006912491cbc3c819c88d3a4450d7f11f9", "type": "message", "status": "completed", "content": [ @@ -174,6 +181,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -209,70 +217,70 @@ } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_08a30bb95515b60d0068c9ef788618819096ed807dca9a6e3e", + "RequestUri": "https://api.openai.com/v1/responses/resp_07a1ad1767d55063006912491c79cc819cba311f57f16b0432", "RequestMethod": "DELETE", "RequestHeaders": { "Accept": "application/json", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 
Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 200, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "980410573e03ba46-SEA", + "CF-RAY": "99c84096d8942c9d-DFW", "Connection": "keep-alive", "Content-Length": "120", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:15:05 GMT", + "Date": "Mon, 10 Nov 2025 20:20:45 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "164", + "openai-processing-ms": "252", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "168", + "x-envoy-upstream-service-time": "258", "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_08a30bb95515b60d0068c9ef788618819096ed807dca9a6e3e", + "id": "resp_07a1ad1767d55063006912491c79cc819cba311f57f16b0432", "object": "response.deleted", "deleted": true } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_08a30bb95515b60d0068c9ef788618819096ed807dca9a6e3e", + "RequestUri": "https://api.openai.com/v1/responses/resp_07a1ad1767d55063006912491c79cc819cba311f57f16b0432?stream=false", "RequestMethod": "GET", "RequestHeaders": { "Accept": "application/json, text/event-stream", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 404, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "98041058cf53ba46-SEA", + "CF-RAY": "99c84098fac82c9d-DFW", "Connection": "keep-alive", "Content-Length": "197", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 
23:15:05 GMT", + "Date": "Mon, 10 Nov 2025 20:20:46 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "70", + "openai-processing-ms": "425", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "73", + "x-envoy-upstream-service-time": "465", "X-Request-ID": "Sanitized" }, "ResponseBody": { "error": { - "message": "Response with id 'resp_08a30bb95515b60d0068c9ef788618819096ed807dca9a6e3e' not found.", + "message": "Response with id 'resp_07a1ad1767d55063006912491c79cc819cba311f57f16b0432' not found.", "type": "invalid_request_error", "param": null, "code": null diff --git a/tests/SessionRecords/ResponsesTests/CanOptOutOfStorageAsync.json b/tests/SessionRecords/ResponsesTests/CanOptOutOfStorageAsync.json index 31a31b191..030f3015d 100644 --- a/tests/SessionRecords/ResponsesTests/CanOptOutOfStorageAsync.json +++ b/tests/SessionRecords/ResponsesTests/CanOptOutOfStorageAsync.json @@ -8,7 +8,7 @@ "Authorization": "Sanitized", "Content-Length": "137", "Content-Type": "application/json", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": { "model": "gpt-4o-mini", @@ -30,33 +30,36 @@ "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "9804105a289eba46-SEA", + "CF-RAY": "99c8409f79692c9d-DFW", "Connection": "keep-alive", - "Content-Length": "1399", + "Content-Length": "1478", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:15:06 GMT", + "Date": "Mon, 10 Nov 2025 20:20:47 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "713", + "openai-processing-ms": "444", "openai-project": "Sanitized", 
"openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "722", + "x-envoy-upstream-service-time": "449", "x-ratelimit-limit-requests": "30000", "x-ratelimit-limit-tokens": "150000000", "x-ratelimit-remaining-requests": "29999", - "x-ratelimit-remaining-tokens": "149999970", + "x-ratelimit-remaining-tokens": "149999972", "x-ratelimit-reset-requests": "2ms", "x-ratelimit-reset-tokens": "0s", "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_0dcd707b1e1966ec0168c9ef7a05148190ab7b43eff69da61a", + "id": "resp_0a1a67e2dcf43d1d016912491eaff08191929d563d272bd37e", "object": "response", - "created_at": 1758064506, + "created_at": 1762806046, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": null, @@ -65,7 +68,7 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "msg_0dcd707b1e1966ec0168c9ef7a907c8190878ee15359fe44b1", + "id": "msg_0a1a67e2dcf43d1d016912491efe6881918dc99da6309c860f", "type": "message", "status": "completed", "content": [ @@ -82,6 +85,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -117,36 +121,36 @@ } }, { - "RequestUri": "https://api.openai.com/v1/responses/resp_0dcd707b1e1966ec0168c9ef7a05148190ab7b43eff69da61a", + "RequestUri": "https://api.openai.com/v1/responses/resp_0a1a67e2dcf43d1d016912491eaff08191929d563d272bd37e?stream=false", "RequestMethod": "GET", "RequestHeaders": { "Accept": "application/json, text/event-stream", "Authorization": "Sanitized", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; 
root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": null, "StatusCode": 404, "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "9804105f4d36ba46-SEA", + "CF-RAY": "99c840a2ccce2c9d-DFW", "Connection": "keep-alive", "Content-Length": "197", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:15:07 GMT", + "Date": "Mon, 10 Nov 2025 20:20:47 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "317", + "openai-processing-ms": "98", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "395", + "x-envoy-upstream-service-time": "101", "X-Request-ID": "Sanitized" }, "ResponseBody": { "error": { - "message": "Response with id 'resp_0dcd707b1e1966ec0168c9ef7a05148190ab7b43eff69da61a' not found.", + "message": "Response with id 'resp_0a1a67e2dcf43d1d016912491eaff08191929d563d272bd37e' not found.", "type": "invalid_request_error", "param": null, "code": null diff --git a/tests/SessionRecords/ResponsesTests/FunctionCallStreamingWorksAsync.json b/tests/SessionRecords/ResponsesTests/FunctionCallStreamingWorksAsync.json index 6788d6016..feafed282 100644 --- a/tests/SessionRecords/ResponsesTests/FunctionCallStreamingWorksAsync.json +++ b/tests/SessionRecords/ResponsesTests/FunctionCallStreamingWorksAsync.json @@ -6,9 +6,9 @@ "RequestHeaders": { "Accept": "application/json, text/event-stream", "Authorization": "Sanitized", - "Content-Length": "589", + "Content-Length": "583", "Content-Type": "application/json", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": { "model": "gpt-4o-mini", @@ -46,7 +46,7 @@ "content": [ { 
"type": "input_text", - "text": "What should I wear for the weather in San Francisco right now?" + "text": "What should I wear for the weather in San Francisco, CA?" } ] } @@ -57,46 +57,50 @@ "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "9804108aa983ba46-SEA", + "CF-RAY": "99c840a59fc22c9d-DFW", "Connection": "keep-alive", "Content-Type": "text/event-stream; charset=utf-8", - "Date": "Tue, 16 Sep 2025 23:15:13 GMT", + "Date": "Mon, 10 Nov 2025 20:20:47 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "57", + "openai-processing-ms": "72", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "Transfer-Encoding": "chunked", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "62", + "x-envoy-upstream-service-time": "75", "X-Request-ID": "Sanitized" }, "ResponseBody": [ "event: response.created\n", - "data: {\"type\":\"response.created\",\"sequence_number\":0,\"response\":{\"id\":\"resp_09b0f9b2a5804b970068c9ef81c3d88194bc5f59ecddfbc9c4\",\"object\":\"response\",\"created_at\":1758064513,\"status\":\"in_progress\",\"background\":false,\"error\":null,\"incomplete_details\":null,\"instructions\":null,\"max_output_tokens\":null,\"max_tool_calls\":null,\"model\":\"gpt-4o-mini-2024-07-18\",\"output\":[],\"parallel_tool_calls\":true,\"previous_response_id\":null,\"prompt_cache_key\":null,\"reasoning\":{\"effort\":null,\"summary\":null},\"safety_identifier\":null,\"service_tier\":\"auto\",\"store\":true,\"temperature\":1.0,\"text\":{\"format\":{\"type\":\"text\"},\"verbosity\":\"medium\"},\"tool_choice\":\"auto\",\"tools\":[{\"type\":\"function\",\"description\":\"Gets the weather at a specified location, optionally specifying units for 
temperature\",\"name\":\"get_weather_at_location\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"},\"unit\":{\"type\":\"string\",\"enum\":[\"C\",\"F\",\"K\"]}},\"required\":[\"location\"]},\"strict\":false}],\"top_logprobs\":0,\"top_p\":1.0,\"truncation\":\"disabled\",\"usage\":null,\"user\":null,\"metadata\":{}}}\n\n", + "data: {\"type\":\"response.created\",\"sequence_number\":0,\"response\":{\"id\":\"resp_092e49e68d1e23ec006912491fa9ac819eb7ca78e4ba686d75\",\"object\":\"response\",\"created_at\":1762806047,\"status\":\"in_progress\",\"background\":false,\"error\":null,\"incomplete_details\":null,\"instructions\":null,\"max_output_tokens\":null,\"max_tool_calls\":null,\"model\":\"gpt-4o-mini-2024-07-18\",\"output\":[],\"parallel_tool_calls\":true,\"previous_response_id\":null,\"prompt_cache_key\":null,\"prompt_cache_retention\":null,\"reasoning\":{\"effort\":null,\"summary\":null},\"safety_identifier\":null,\"service_tier\":\"auto\",\"store\":true,\"temperature\":1.0,\"text\":{\"format\":{\"type\":\"text\"},\"verbosity\":\"medium\"},\"tool_choice\":\"auto\",\"tools\":[{\"type\":\"function\",\"description\":\"Gets the weather at a specified location, optionally specifying units for temperature\",\"name\":\"get_weather_at_location\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"},\"unit\":{\"type\":\"string\",\"enum\":[\"C\",\"F\",\"K\"]}},\"required\":[\"location\"]},\"strict\":false}],\"top_logprobs\":0,\"top_p\":1.0,\"truncation\":\"disabled\",\"usage\":null,\"user\":null,\"metadata\":{}}}\n\n", "event: response.in_progress\n", - "data: 
{\"type\":\"response.in_progress\",\"sequence_number\":1,\"response\":{\"id\":\"resp_09b0f9b2a5804b970068c9ef81c3d88194bc5f59ecddfbc9c4\",\"object\":\"response\",\"created_at\":1758064513,\"status\":\"in_progress\",\"background\":false,\"error\":null,\"incomplete_details\":null,\"instructions\":null,\"max_output_tokens\":null,\"max_tool_calls\":null,\"model\":\"gpt-4o-mini-2024-07-18\",\"output\":[],\"parallel_tool_calls\":true,\"previous_response_id\":null,\"prompt_cache_key\":null,\"reasoning\":{\"effort\":null,\"summary\":null},\"safety_identifier\":null,\"service_tier\":\"auto\",\"store\":true,\"temperature\":1.0,\"text\":{\"format\":{\"type\":\"text\"},\"verbosity\":\"medium\"},\"tool_choice\":\"auto\",\"tools\":[{\"type\":\"function\",\"description\":\"Gets the weather at a specified location, optionally specifying units for temperature\",\"name\":\"get_weather_at_location\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"},\"unit\":{\"type\":\"string\",\"enum\":[\"C\",\"F\",\"K\"]}},\"required\":[\"location\"]},\"strict\":false}],\"top_logprobs\":0,\"top_p\":1.0,\"truncation\":\"disabled\",\"usage\":null,\"user\":null,\"metadata\":{}}}\n\n", + "data: 
{\"type\":\"response.in_progress\",\"sequence_number\":1,\"response\":{\"id\":\"resp_092e49e68d1e23ec006912491fa9ac819eb7ca78e4ba686d75\",\"object\":\"response\",\"created_at\":1762806047,\"status\":\"in_progress\",\"background\":false,\"error\":null,\"incomplete_details\":null,\"instructions\":null,\"max_output_tokens\":null,\"max_tool_calls\":null,\"model\":\"gpt-4o-mini-2024-07-18\",\"output\":[],\"parallel_tool_calls\":true,\"previous_response_id\":null,\"prompt_cache_key\":null,\"prompt_cache_retention\":null,\"reasoning\":{\"effort\":null,\"summary\":null},\"safety_identifier\":null,\"service_tier\":\"auto\",\"store\":true,\"temperature\":1.0,\"text\":{\"format\":{\"type\":\"text\"},\"verbosity\":\"medium\"},\"tool_choice\":\"auto\",\"tools\":[{\"type\":\"function\",\"description\":\"Gets the weather at a specified location, optionally specifying units for temperature\",\"name\":\"get_weather_at_location\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"},\"unit\":{\"type\":\"string\",\"enum\":[\"C\",\"F\",\"K\"]}},\"required\":[\"location\"]},\"strict\":false}],\"top_logprobs\":0,\"top_p\":1.0,\"truncation\":\"disabled\",\"usage\":null,\"user\":null,\"metadata\":{}}}\n\n", "event: response.output_item.added\n", - "data: {\"type\":\"response.output_item.added\",\"sequence_number\":2,\"output_index\":0,\"item\":{\"id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"type\":\"function_call\",\"status\":\"in_progress\",\"arguments\":\"\",\"call_id\":\"call_NnxHj2FpeF2SlFkJ0YpNC46X\",\"name\":\"get_weather_at_location\"}}\n\n", + "data: {\"type\":\"response.output_item.added\",\"sequence_number\":2,\"output_index\":0,\"item\":{\"id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"type\":\"function_call\",\"status\":\"in_progress\",\"arguments\":\"\",\"call_id\":\"call_C9WzZ6slcrcsQjUphRCUIvSi\",\"name\":\"get_weather_at_location\"}}\n\n", "event: response.function_call_arguments.delta\n", - "data: 
{\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":3,\"item_id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"output_index\":0,\"delta\":\"{\\\"\",\"obfuscation\":\"nkLspmohHv8f0t\"}\n\n", + "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":3,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"delta\":\"{\\\"\",\"obfuscation\":\"5EchOENfPoTbKV\"}\n\n", "event: response.function_call_arguments.delta\n", - "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":4,\"item_id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"output_index\":0,\"delta\":\"location\",\"obfuscation\":\"1qchfmqs\"}\n\n", + "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":4,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"delta\":\"location\",\"obfuscation\":\"DHqWLUKz\"}\n\n", "event: response.function_call_arguments.delta\n", - "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":5,\"item_id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"output_index\":0,\"delta\":\"\\\":\\\"\",\"obfuscation\":\"6jlXOe997dFJQ\"}\n\n", + "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":5,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"delta\":\"\\\":\\\"\",\"obfuscation\":\"i8KWkM9ZaLrRo\"}\n\n", "event: response.function_call_arguments.delta\n", - "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":6,\"item_id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"output_index\":0,\"delta\":\"San\",\"obfuscation\":\"D7btEv5YpXeWr\"}\n\n", + "data: 
{\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":6,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"delta\":\"San\",\"obfuscation\":\"fYR52XmIptPjR\"}\n\n", "event: response.function_call_arguments.delta\n", - "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":7,\"item_id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"output_index\":0,\"delta\":\" Francisco\",\"obfuscation\":\"W0TgNI\"}\n\n", + "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":7,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"delta\":\" Francisco\",\"obfuscation\":\"wJpJbk\"}\n\n", "event: response.function_call_arguments.delta\n", - "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":8,\"item_id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"output_index\":0,\"delta\":\"\\\"}\",\"obfuscation\":\"n8lO1QqzSIfFz8\"}\n\n", + "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":8,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"delta\":\",\",\"obfuscation\":\"nk7MR81QwG1HLeE\"}\n\n", + "event: response.function_call_arguments.delta\n", + "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":9,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"delta\":\" CA\",\"obfuscation\":\"mRMpuE5jnCU9H\"}\n\n", + "event: response.function_call_arguments.delta\n", + "data: {\"type\":\"response.function_call_arguments.delta\",\"sequence_number\":10,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"delta\":\"\\\"}\",\"obfuscation\":\"L35jOLQqT8P9iH\"}\n\n", "event: response.function_call_arguments.done\n", - "data: 
{\"type\":\"response.function_call_arguments.done\",\"sequence_number\":9,\"item_id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"output_index\":0,\"arguments\":\"{\\\"location\\\":\\\"San Francisco\\\"}\"}\n\n", + "data: {\"type\":\"response.function_call_arguments.done\",\"sequence_number\":11,\"item_id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"output_index\":0,\"arguments\":\"{\\\"location\\\":\\\"San Francisco, CA\\\"}\"}\n\n", "event: response.output_item.done\n", - "data: {\"type\":\"response.output_item.done\",\"sequence_number\":10,\"output_index\":0,\"item\":{\"id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"type\":\"function_call\",\"status\":\"completed\",\"arguments\":\"{\\\"location\\\":\\\"San Francisco\\\"}\",\"call_id\":\"call_NnxHj2FpeF2SlFkJ0YpNC46X\",\"name\":\"get_weather_at_location\"}}\n\n", + "data: {\"type\":\"response.output_item.done\",\"sequence_number\":12,\"output_index\":0,\"item\":{\"id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"type\":\"function_call\",\"status\":\"completed\",\"arguments\":\"{\\\"location\\\":\\\"San Francisco, CA\\\"}\",\"call_id\":\"call_C9WzZ6slcrcsQjUphRCUIvSi\",\"name\":\"get_weather_at_location\"}}\n\n", "event: response.completed\n", - "data: {\"type\":\"response.completed\",\"sequence_number\":11,\"response\":{\"id\":\"resp_09b0f9b2a5804b970068c9ef81c3d88194bc5f59ecddfbc9c4\",\"object\":\"response\",\"created_at\":1758064513,\"status\":\"completed\",\"background\":false,\"error\":null,\"incomplete_details\":null,\"instructions\":null,\"max_output_tokens\":null,\"max_tool_calls\":null,\"model\":\"gpt-4o-mini-2024-07-18\",\"output\":[{\"id\":\"fc_09b0f9b2a5804b970068c9ef827c888194bf181e35c208ede1\",\"type\":\"function_call\",\"status\":\"completed\",\"arguments\":\"{\\\"location\\\":\\\"San 
Francisco\\\"}\",\"call_id\":\"call_NnxHj2FpeF2SlFkJ0YpNC46X\",\"name\":\"get_weather_at_location\"}],\"parallel_tool_calls\":true,\"previous_response_id\":null,\"prompt_cache_key\":null,\"reasoning\":{\"effort\":null,\"summary\":null},\"safety_identifier\":null,\"service_tier\":\"default\",\"store\":true,\"temperature\":1.0,\"text\":{\"format\":{\"type\":\"text\"},\"verbosity\":\"medium\"},\"tool_choice\":\"auto\",\"tools\":[{\"type\":\"function\",\"description\":\"Gets the weather at a specified location, optionally specifying units for temperature\",\"name\":\"get_weather_at_location\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"},\"unit\":{\"type\":\"string\",\"enum\":[\"C\",\"F\",\"K\"]}},\"required\":[\"location\"]},\"strict\":false}],\"top_logprobs\":0,\"top_p\":1.0,\"truncation\":\"disabled\",\"usage\":{\"input_tokens\":74,\"input_tokens_details\":{\"cached_tokens\":0},\"output_tokens\":18,\"output_tokens_details\":{\"reasoning_tokens\":0},\"total_tokens\":92},\"user\":null,\"metadata\":{}}}\n\n" + "data: {\"type\":\"response.completed\",\"sequence_number\":13,\"response\":{\"id\":\"resp_092e49e68d1e23ec006912491fa9ac819eb7ca78e4ba686d75\",\"object\":\"response\",\"created_at\":1762806047,\"status\":\"completed\",\"background\":false,\"error\":null,\"incomplete_details\":null,\"instructions\":null,\"max_output_tokens\":null,\"max_tool_calls\":null,\"model\":\"gpt-4o-mini-2024-07-18\",\"output\":[{\"id\":\"fc_092e49e68d1e23ec00691249202fd8819ea9c7316382c4e6c5\",\"type\":\"function_call\",\"status\":\"completed\",\"arguments\":\"{\\\"location\\\":\\\"San Francisco, 
CA\\\"}\",\"call_id\":\"call_C9WzZ6slcrcsQjUphRCUIvSi\",\"name\":\"get_weather_at_location\"}],\"parallel_tool_calls\":true,\"previous_response_id\":null,\"prompt_cache_key\":null,\"prompt_cache_retention\":null,\"reasoning\":{\"effort\":null,\"summary\":null},\"safety_identifier\":null,\"service_tier\":\"default\",\"store\":true,\"temperature\":1.0,\"text\":{\"format\":{\"type\":\"text\"},\"verbosity\":\"medium\"},\"tool_choice\":\"auto\",\"tools\":[{\"type\":\"function\",\"description\":\"Gets the weather at a specified location, optionally specifying units for temperature\",\"name\":\"get_weather_at_location\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"},\"unit\":{\"type\":\"string\",\"enum\":[\"C\",\"F\",\"K\"]}},\"required\":[\"location\"]},\"strict\":false}],\"top_logprobs\":0,\"top_p\":1.0,\"truncation\":\"disabled\",\"usage\":{\"input_tokens\":74,\"input_tokens_details\":{\"cached_tokens\":0},\"output_tokens\":20,\"output_tokens_details\":{\"reasoning_tokens\":0},\"total_tokens\":94},\"user\":null,\"metadata\":{}}}\n\n" ] } ], diff --git a/tests/SessionRecords/ResponsesTests/WebSearchCallAsync.json b/tests/SessionRecords/ResponsesTests/WebSearchCallAsync.json index ed43fdd2c..0522274e7 100644 --- a/tests/SessionRecords/ResponsesTests/WebSearchCallAsync.json +++ b/tests/SessionRecords/ResponsesTests/WebSearchCallAsync.json @@ -6,9 +6,9 @@ "RequestHeaders": { "Accept": "application/json, text/event-stream", "Authorization": "Sanitized", - "Content-Length": "236", + "Content-Length": "253", "Content-Type": "application/json", - "User-Agent": "OpenAI/2.4.0 (.NET 9.0.9; Microsoft Windows 10.0.26100)" + "User-Agent": "OpenAI/2.5.0 (.NET 9.0.10; Darwin 25.0.0 Darwin Kernel Version 25.0.0: Wed Sep 17 21:42:08 PDT 2025; root:xnu-12377.1.9~141/RELEASE_ARM64_T8132)" }, "RequestBody": { "model": "gpt-4o-mini", @@ -27,7 +27,7 @@ "content": [ { "type": "input_text", - "text": "What was a positive news story from today?" 
+ "text": "Searching the internet, what's the weather like in Seattle?" } ] } @@ -37,27 +37,30 @@ "ResponseHeaders": { "Alt-Svc": "h3=\":443\"", "cf-cache-status": "DYNAMIC", - "CF-RAY": "980411430b9eba46-SEA", + "CF-RAY": "99c8411c99072c9d-DFW", "Connection": "keep-alive", - "Content-Length": "4732", + "Content-Length": "4743", "Content-Type": "application/json", - "Date": "Tue, 16 Sep 2025 23:15:46 GMT", + "Date": "Mon, 10 Nov 2025 20:21:09 GMT", "openai-organization": "Sanitized", - "openai-processing-ms": "3158", + "openai-processing-ms": "3206", "openai-project": "Sanitized", "openai-version": "2020-10-01", "Server": "cloudflare", "Strict-Transport-Security": "max-age=31536000; includeSubDomains; preload", "X-Content-Type-Options": "nosniff", - "x-envoy-upstream-service-time": "3163", + "x-envoy-upstream-service-time": "3212", "X-Request-ID": "Sanitized" }, "ResponseBody": { - "id": "resp_0371fa63edfcc1ef0068c9ef9f47cc8193966cbe9073775d43", + "id": "resp_00dd7135ab0a74ed0069124932b61c81a09e0f07f727ada951", "object": "response", - "created_at": 1758064543, + "created_at": 1762806066, "status": "completed", "background": false, + "billing": { + "payer": "developer" + }, "error": null, "incomplete_details": null, "instructions": null, @@ -66,16 +69,16 @@ "model": "gpt-4o-mini-2024-07-18", "output": [ { - "id": "ws_0371fa63edfcc1ef0068c9ef9f5a888193974c26f9a550a83d", + "id": "ws_00dd7135ab0a74ed0069124932c74c81a09aa5d264888f6b3f", "type": "web_search_call", "status": "completed", "action": { "type": "search", - "query": "What was a positive news story from today?" + "query": "Searching the internet, what's the weather like in Seattle?" 
} }, { - "id": "msg_0371fa63edfcc1ef0068c9ef9ff7ec81939c7464b3909d564a", + "id": "msg_00dd7135ab0a74ed0069124934380c81a09241c6ba316550bb", "type": "message", "status": "completed", "content": [ @@ -84,28 +87,21 @@ "annotations": [ { "type": "url_citation", - "end_index": 595, - "start_index": 482, - "title": "Good News This Week: September 13, 2025 - LEGO, Koalas, & Scientists", - "url": "https://www.goodgoodgood.co/articles/good-news-this-week-september-13-2025?utm_source=openai" - }, - { - "type": "url_citation", - "end_index": 1069, - "start_index": 926, - "title": "GNR for Tuesday, September 16, 2025 — More and more good news!", - "url": "https://www.dailykos.com/story/2025/9/16/2342576/-GNR-for-Tuesday-September-16-2025-More-and-more-good-news?utm_source=openai" + "end_index": 1390, + "start_index": 1271, + "title": "Seattle Weather in November 2025 | United States Averages | Weather-2-Visit", + "url": "https://www.weather2visit.com/north-america/united-states/seattle-november.htm?utm_source=openai" }, { "type": "url_citation", - "end_index": 1608, - "start_index": 1526, - "title": "The Christian Science Monitor Daily for September 16, 2025", - "url": "https://www.csmonitor.com/Daily/2025/20250916?utm_source=openai" + "end_index": 1711, + "start_index": 1580, + "title": "West Coast watches for La Niña", + "url": "https://www.axios.com/local/seattle/2025/09/04/la-nina-west-coast-winter-weather-seattle-san-diego?utm_source=openai" } ], "logprobs": [], - "text": "On September 16, 2025, several positive news stories emerged:\n\n**Environmental Conservation in Australia**\n\nThe New South Wales government in Australia announced the creation of a significant national park to protect over 12,000 koalas. This initiative adds 176,000 hectares of forest to existing reserves, making it one of the state's largest national parks. The park will also impose an immediate ban on logging within its boundaries, safeguarding critical biodiversity hotspots. 
([goodgoodgood.co](https://www.goodgoodgood.co/articles/good-news-this-week-september-13-2025?utm_source=openai))\n\n**Advancements in Renewable Energy**\n\nSingapore is set to begin constructing an 86-megawatt floating solar farm on the Pandan Reservoir, its largest reservoir. This project aims to generate 296 megawatts of clean energy, contributing significantly to the country's efforts to decarbonize and meet its energy demands sustainably. ([dailykos.com](https://www.dailykos.com/story/2025/9/16/2342576/-GNR-for-Tuesday-September-16-2025-More-and-more-good-news?utm_source=openai))\n\n**Global Efforts to Combat Overfishing**\n\nA global agreement to curb overfishing took effect, requiring governments to cut subsidies to fishing fleets to help protect dwindling stocks. The World Trade Organization deal entered into force after Brazil, Kenya, Tonga, and Vietnam signed on, bringing the total to 112 countries. The organization touts the deal as its first environmental pact and the first binding multilateral deal on ocean sustainability. ([csmonitor.com](https://www.csmonitor.com/Daily/2025/20250916?utm_source=openai))\n\nThese stories highlight significant strides in environmental conservation and sustainable energy, reflecting a global commitment to addressing ecological challenges. 
" + "text": "As of Monday, November 10, 2025, in Seattle, Washington, the weather is mostly cloudy with a temperature of 58°F (14°C).\n\n## Weather for Seattle, WA:\nCurrent Conditions: Mostly cloudy, 58°F (14°C)\n\nDaily Forecast:\n* Monday, November 10: Low: 45°F (7°C), High: 58°F (15°C), Description: Mostly cloudy\n* Tuesday, November 11: Low: 46°F (8°C), High: 54°F (12°C), Description: Times of clouds and sun\n* Wednesday, November 12: Low: 49°F (9°C), High: 55°F (13°C), Description: Cloudy with a couple of showers\n* Thursday, November 13: Low: 44°F (6°C), High: 55°F (13°C), Description: Cloudy with a bit of rain; winds gusting to 65 kph in the afternoon\n* Friday, November 14: Low: 45°F (7°C), High: 50°F (10°C), Description: Breezy in the morning; cloudy with a couple of showers\n* Saturday, November 15: Low: 47°F (9°C), High: 56°F (14°C), Description: Cloudy and not as cool; an afternoon shower in the area\n* Sunday, November 16: Low: 43°F (6°C), High: 53°F (12°C), Description: Rain\n\n\nIn November, Seattle typically experiences cooler temperatures and increased rainfall. The average high temperature is around 50°F (10°C), with lows near 40°F (4°C). The city averages about 17 days of rain during the month, accumulating approximately 5.9 inches (150 mm) of precipitation. ([weather2visit.com](https://www.weather2visit.com/north-america/united-states/seattle-november.htm?utm_source=openai))\n\nAdditionally, Seattle is currently monitoring a developing La Niña event, which could influence winter weather patterns, potentially leading to colder and wetter conditions in the region. ([axios.com](https://www.axios.com/local/seattle/2025/09/04/la-nina-west-coast-winter-weather-seattle-san-diego?utm_source=openai))\n\nPlease note that weather conditions can change rapidly; for the most current information, consult local weather services. 
" } ], "role": "assistant" @@ -114,6 +110,7 @@ "parallel_tool_calls": true, "previous_response_id": null, "prompt_cache_key": null, + "prompt_cache_retention": null, "reasoning": { "effort": null, "summary": null @@ -148,15 +145,15 @@ "top_p": 1.0, "truncation": "disabled", "usage": { - "input_tokens": 312, + "input_tokens": 316, "input_tokens_details": { "cached_tokens": 0 }, - "output_tokens": 368, + "output_tokens": 498, "output_tokens_details": { "reasoning_tokens": 0 }, - "total_tokens": 680 + "total_tokens": 814 }, "user": null, "metadata": {} From 036535aacb033a9d8ef85173125c0883fbd9f9fc Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Tue, 11 Nov 2025 13:08:59 -0600 Subject: [PATCH 08/15] fb --- .../CreateResponseOptions.Serialization.cs | 4 +- src/Custom/Responses/CreateResponseOptions.cs | 5 +- .../Responses/ResponseCreationOptions.cs | 15 +- .../Responses/ResponseResult.Serialization.cs | 4 +- src/Custom/Responses/ResponseResult.cs | 4 +- .../StreamingResponseCompletedUpdate.cs | 1 - .../StreamingResponseCreatedUpdate.cs | 1 - .../StreamingResponseFailedUpdate.cs | 1 - .../StreamingResponseInProgressUpdate.cs | 1 - .../StreamingResponseIncompleteUpdate.cs | 1 - .../StreamingResponseQueuedUpdate.cs | 1 - src/Generated/Models/ModelIdsResponses.cs | 224 ++++++++++++++++++ .../Responses/InternalModelIdsResponses.cs | 223 ----------------- .../Models/Responses/OpenAIResponse.cs | 1 - .../Responses/ResponseCreationOptions.cs | 1 - ...ngResponseCompletedUpdate.Serialization.cs | 12 + .../StreamingResponseCompletedUpdate.cs | 13 + ...mingResponseCreatedUpdate.Serialization.cs | 12 + .../StreamingResponseCreatedUpdate.cs | 13 + ...amingResponseFailedUpdate.Serialization.cs | 12 + .../StreamingResponseFailedUpdate.cs | 13 + ...gResponseInProgressUpdate.Serialization.cs | 12 + .../StreamingResponseInProgressUpdate.cs | 13 + ...gResponseIncompleteUpdate.Serialization.cs | 12 + .../StreamingResponseIncompleteUpdate.cs | 13 + 
...amingResponseQueuedUpdate.Serialization.cs | 12 + .../StreamingResponseQueuedUpdate.cs | 13 + src/Generated/OpenAIModelFactory.cs | 34 --- 28 files changed, 390 insertions(+), 281 deletions(-) create mode 100644 src/Generated/Models/ModelIdsResponses.cs delete mode 100644 src/Generated/Models/Responses/InternalModelIdsResponses.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseCompletedUpdate.Serialization.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseCompletedUpdate.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseCreatedUpdate.Serialization.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseCreatedUpdate.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseFailedUpdate.Serialization.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseFailedUpdate.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseInProgressUpdate.Serialization.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseInProgressUpdate.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.Serialization.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseQueuedUpdate.Serialization.cs create mode 100644 src/Generated/Models/Responses/StreamingResponseQueuedUpdate.cs diff --git a/src/Custom/Responses/CreateResponseOptions.Serialization.cs b/src/Custom/Responses/CreateResponseOptions.Serialization.cs index f67adfe24..5abd9dfa7 100644 --- a/src/Custom/Responses/CreateResponseOptions.Serialization.cs +++ b/src/Custom/Responses/CreateResponseOptions.Serialization.cs @@ -246,7 +246,7 @@ internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonEleme string user = default; ResponseServiceTier? serviceTier = default; string previousResponseId = default; - InternalModelIdsResponses? 
model = default; + ModelIdsResponses? model = default; ResponseReasoningOptions reasoning = default; bool? background = default; int? maxOutputTokens = default; @@ -336,7 +336,7 @@ internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonEleme { continue; } - model = new InternalModelIdsResponses(prop.Value.GetString()); + model = new ModelIdsResponses(prop.Value.GetString()); continue; } if (prop.NameEquals("reasoning"u8)) diff --git a/src/Custom/Responses/CreateResponseOptions.cs b/src/Custom/Responses/CreateResponseOptions.cs index f202c6c87..76eaf1f14 100644 --- a/src/Custom/Responses/CreateResponseOptions.cs +++ b/src/Custom/Responses/CreateResponseOptions.cs @@ -1,4 +1,3 @@ -using System; using System.ClientModel.Primitives; using System.Collections.Generic; using System.ComponentModel; @@ -24,7 +23,7 @@ public CreateResponseOptions(List input) } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - internal CreateResponseOptions(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, InternalModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncation, IList input, IList include, bool? parallelToolCalls, bool? store, bool? stream, in JsonPatch patch) + internal CreateResponseOptions(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, ModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncation, IList input, IList include, bool? parallelToolCalls, bool? store, bool? 
stream, in JsonPatch patch) { // Plugin customization: ensure initialization of collections Metadata = metadata ?? new ChangeTrackingDictionary(); @@ -68,7 +67,7 @@ internal CreateResponseOptions(IDictionary metadata, float? temp public string PreviousResponseId { get; set; } - internal InternalModelIdsResponses? Model { get; set; } + public ModelIdsResponses? Model { get; set; } public ResponseReasoningOptions Reasoning { get; set; } diff --git a/src/Custom/Responses/ResponseCreationOptions.cs b/src/Custom/Responses/ResponseCreationOptions.cs index 250810292..55d408b19 100644 --- a/src/Custom/Responses/ResponseCreationOptions.cs +++ b/src/Custom/Responses/ResponseCreationOptions.cs @@ -71,13 +71,14 @@ internal partial class ResponseCreationOptions [CodeGenMember("Tools")] public IList Tools { get; } - internal ResponseCreationOptions GetClone() - { - ResponseCreationOptions copiedOptions = (ResponseCreationOptions)this.MemberwiseClone(); - copiedOptions.Patch = _patch; - - return copiedOptions; - } + internal ResponseCreationOptions GetClone() + { + ResponseCreationOptions copiedOptions = (ResponseCreationOptions)this.MemberwiseClone(); + #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + copiedOptions.Patch = _patch; + #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ return copiedOptions; + } internal BinaryContent ToBinaryContent() => BinaryContent.Create(this, ModelSerializationExtensions.WireOptions); } diff --git a/src/Custom/Responses/ResponseResult.Serialization.cs b/src/Custom/Responses/ResponseResult.Serialization.cs index c92ab45f4..7119e2714 100644 --- a/src/Custom/Responses/ResponseResult.Serialization.cs +++ b/src/Custom/Responses/ResponseResult.Serialization.cs @@ -276,7 +276,7 @@ internal static ResponseResult DeserializeResponseResult(JsonElement element, Bi string user = default; ResponseServiceTier? serviceTier = default; string previousResponseId = default; - InternalModelIdsResponses? model = default; + ModelIdsResponses? model = default; ResponseReasoningOptions reasoning = default; bool? background = default; int? maxOutputTokens = default; @@ -377,7 +377,7 @@ internal static ResponseResult DeserializeResponseResult(JsonElement element, Bi { continue; } - model = new InternalModelIdsResponses(prop.Value.GetString()); + model = new ModelIdsResponses(prop.Value.GetString()); continue; } if (prop.NameEquals("reasoning"u8)) diff --git a/src/Custom/Responses/ResponseResult.cs b/src/Custom/Responses/ResponseResult.cs index af0f79c7e..45adcdfdc 100644 --- a/src/Custom/Responses/ResponseResult.cs +++ b/src/Custom/Responses/ResponseResult.cs @@ -30,7 +30,7 @@ internal ResponseResult(IDictionary metadata, float? temperature } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - internal ResponseResult(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, InternalModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncation, string id, string @object, ResponseStatus? 
status, DateTimeOffset createdAt, ResponseError error, ResponseIncompleteStatusDetails incompleteDetails, IList output, string outputText, ResponseTokenUsage usage, bool parallelToolCalls, in JsonPatch patch) + internal ResponseResult(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, ModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncation, string id, string @object, ResponseStatus? status, DateTimeOffset createdAt, ResponseError error, ResponseIncompleteStatusDetails incompleteDetails, IList output, string outputText, ResponseTokenUsage usage, bool parallelToolCalls, in JsonPatch patch) { // Plugin customization: ensure initialization of collections Metadata = metadata ?? new ChangeTrackingDictionary(); @@ -79,7 +79,7 @@ internal ResponseResult(IDictionary metadata, float? temperature public string PreviousResponseId { get; } - internal InternalModelIdsResponses? InternalModel { get; } + public ModelIdsResponses? 
InternalModel { get; } public string Model => InternalModel?.ToString(); diff --git a/src/Custom/Responses/StreamingResponseCompletedUpdate.cs b/src/Custom/Responses/StreamingResponseCompletedUpdate.cs index 0bb83e4e6..5a09a4b38 100644 --- a/src/Custom/Responses/StreamingResponseCompletedUpdate.cs +++ b/src/Custom/Responses/StreamingResponseCompletedUpdate.cs @@ -3,7 +3,6 @@ namespace OpenAI.Responses { - [Experimental("OPENAI001")] public partial class StreamingResponseCompletedUpdate : StreamingResponseUpdate { internal StreamingResponseCompletedUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseCompleted, sequenceNumber) diff --git a/src/Custom/Responses/StreamingResponseCreatedUpdate.cs b/src/Custom/Responses/StreamingResponseCreatedUpdate.cs index 06e7a88c9..fc2824174 100644 --- a/src/Custom/Responses/StreamingResponseCreatedUpdate.cs +++ b/src/Custom/Responses/StreamingResponseCreatedUpdate.cs @@ -7,7 +7,6 @@ namespace OpenAI.Responses { - [Experimental("OPENAI001")] public partial class StreamingResponseCreatedUpdate : StreamingResponseUpdate { internal StreamingResponseCreatedUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseCreated, sequenceNumber) diff --git a/src/Custom/Responses/StreamingResponseFailedUpdate.cs b/src/Custom/Responses/StreamingResponseFailedUpdate.cs index 55ad666b5..db72c1a2d 100644 --- a/src/Custom/Responses/StreamingResponseFailedUpdate.cs +++ b/src/Custom/Responses/StreamingResponseFailedUpdate.cs @@ -3,7 +3,6 @@ namespace OpenAI.Responses { - [Experimental("OPENAI001")] public partial class StreamingResponseFailedUpdate : StreamingResponseUpdate { internal StreamingResponseFailedUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseFailed, sequenceNumber) diff --git a/src/Custom/Responses/StreamingResponseInProgressUpdate.cs b/src/Custom/Responses/StreamingResponseInProgressUpdate.cs index 
77413b5af..5899f8639 100644 --- a/src/Custom/Responses/StreamingResponseInProgressUpdate.cs +++ b/src/Custom/Responses/StreamingResponseInProgressUpdate.cs @@ -3,7 +3,6 @@ namespace OpenAI.Responses { - [Experimental("OPENAI001")] public partial class StreamingResponseInProgressUpdate : StreamingResponseUpdate { internal StreamingResponseInProgressUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseInProgress, sequenceNumber) diff --git a/src/Custom/Responses/StreamingResponseIncompleteUpdate.cs b/src/Custom/Responses/StreamingResponseIncompleteUpdate.cs index 8fc48ec49..329999676 100644 --- a/src/Custom/Responses/StreamingResponseIncompleteUpdate.cs +++ b/src/Custom/Responses/StreamingResponseIncompleteUpdate.cs @@ -3,7 +3,6 @@ namespace OpenAI.Responses { - [Experimental("OPENAI001")] public partial class StreamingResponseIncompleteUpdate : StreamingResponseUpdate { internal StreamingResponseIncompleteUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseIncomplete, sequenceNumber) diff --git a/src/Custom/Responses/StreamingResponseQueuedUpdate.cs b/src/Custom/Responses/StreamingResponseQueuedUpdate.cs index 651dc4b6c..b3f5cda02 100644 --- a/src/Custom/Responses/StreamingResponseQueuedUpdate.cs +++ b/src/Custom/Responses/StreamingResponseQueuedUpdate.cs @@ -3,7 +3,6 @@ namespace OpenAI.Responses { - [Experimental("OPENAI001")] public partial class StreamingResponseQueuedUpdate : StreamingResponseUpdate { internal StreamingResponseQueuedUpdate(int sequenceNumber, ResponseResult response) : base(InternalResponseStreamEventType.ResponseQueued, sequenceNumber) diff --git a/src/Generated/Models/ModelIdsResponses.cs b/src/Generated/Models/ModelIdsResponses.cs new file mode 100644 index 000000000..649190868 --- /dev/null +++ b/src/Generated/Models/ModelIdsResponses.cs @@ -0,0 +1,224 @@ +// + +#nullable disable + +using System; +using System.ComponentModel; +using 
System.Diagnostics.CodeAnalysis; + +namespace OpenAI +{ + [Experimental("OPENAI001")] + public readonly partial struct ModelIdsResponses : IEquatable + { + private readonly string _value; + private const string Gpt41Value = "gpt-4.1"; + private const string Gpt41MiniValue = "gpt-4.1-mini"; + private const string Gpt41NanoValue = "gpt-4.1-nano"; + private const string Gpt4120250414Value = "gpt-4.1-2025-04-14"; + private const string Gpt41Mini20250414Value = "gpt-4.1-mini-2025-04-14"; + private const string Gpt41Nano20250414Value = "gpt-4.1-nano-2025-04-14"; + private const string O4MiniValue = "o4-mini"; + private const string O4Mini20250416Value = "o4-mini-2025-04-16"; + private const string O3Value = "o3"; + private const string O320250416Value = "o3-2025-04-16"; + private const string O3MiniValue = "o3-mini"; + private const string O3Mini20250131Value = "o3-mini-2025-01-31"; + private const string O1Value = "o1"; + private const string O120241217Value = "o1-2024-12-17"; + private const string O1PreviewValue = "o1-preview"; + private const string O1Preview20240912Value = "o1-preview-2024-09-12"; + private const string O1MiniValue = "o1-mini"; + private const string O1Mini20240912Value = "o1-mini-2024-09-12"; + private const string Gpt4oValue = "gpt-4o"; + private const string Gpt4o20241120Value = "gpt-4o-2024-11-20"; + private const string Gpt4o20240806Value = "gpt-4o-2024-08-06"; + private const string Gpt4o20240513Value = "gpt-4o-2024-05-13"; + private const string Gpt4oAudioPreviewValue = "gpt-4o-audio-preview"; + private const string Gpt4oAudioPreview20241001Value = "gpt-4o-audio-preview-2024-10-01"; + private const string Gpt4oAudioPreview20241217Value = "gpt-4o-audio-preview-2024-12-17"; + private const string Gpt4oAudioPreview20250603Value = "gpt-4o-audio-preview-2025-06-03"; + private const string Gpt4oMiniAudioPreviewValue = "gpt-4o-mini-audio-preview"; + private const string Gpt4oMiniAudioPreview20241217Value = "gpt-4o-mini-audio-preview-2024-12-17"; + 
private const string Gpt4oSearchPreviewValue = "gpt-4o-search-preview"; + private const string Gpt4oMiniSearchPreviewValue = "gpt-4o-mini-search-preview"; + private const string Gpt4oSearchPreview20250311Value = "gpt-4o-search-preview-2025-03-11"; + private const string Gpt4oMiniSearchPreview20250311Value = "gpt-4o-mini-search-preview-2025-03-11"; + private const string Chatgpt4oLatestValue = "chatgpt-4o-latest"; + private const string CodexMiniLatestValue = "codex-mini-latest"; + private const string Gpt4oMiniValue = "gpt-4o-mini"; + private const string Gpt4oMini20240718Value = "gpt-4o-mini-2024-07-18"; + private const string Gpt4TurboValue = "gpt-4-turbo"; + private const string Gpt4Turbo20240409Value = "gpt-4-turbo-2024-04-09"; + private const string Gpt40125PreviewValue = "gpt-4-0125-preview"; + private const string Gpt4TurboPreviewValue = "gpt-4-turbo-preview"; + private const string Gpt41106PreviewValue = "gpt-4-1106-preview"; + private const string Gpt4VisionPreviewValue = "gpt-4-vision-preview"; + private const string Gpt4Value = "gpt-4"; + private const string Gpt40314Value = "gpt-4-0314"; + private const string Gpt40613Value = "gpt-4-0613"; + private const string Gpt432kValue = "gpt-4-32k"; + private const string Gpt432k0314Value = "gpt-4-32k-0314"; + private const string Gpt432k0613Value = "gpt-4-32k-0613"; + private const string Gpt35TurboValue = "gpt-3.5-turbo"; + private const string Gpt35Turbo16kValue = "gpt-3.5-turbo-16k"; + private const string Gpt35Turbo0301Value = "gpt-3.5-turbo-0301"; + private const string Gpt35Turbo0613Value = "gpt-3.5-turbo-0613"; + private const string Gpt35Turbo1106Value = "gpt-3.5-turbo-1106"; + private const string Gpt35Turbo0125Value = "gpt-3.5-turbo-0125"; + private const string Gpt35Turbo16k0613Value = "gpt-3.5-turbo-16k-0613"; + private const string O1ProValue = "o1-pro"; + private const string O1Pro20250319Value = "o1-pro-2025-03-19"; + private const string O3ProValue = "o3-pro"; + private const string 
O3Pro20250610Value = "o3-pro-2025-06-10"; + private const string ComputerUsePreviewValue = "computer-use-preview"; + private const string ComputerUsePreview20250311Value = "computer-use-preview-2025-03-11"; + + public ModelIdsResponses(string value) + { + Argument.AssertNotNull(value, nameof(value)); + + _value = value; + } + + public static ModelIdsResponses Gpt41 { get; } = new ModelIdsResponses(Gpt41Value); + + public static ModelIdsResponses Gpt41Mini { get; } = new ModelIdsResponses(Gpt41MiniValue); + + public static ModelIdsResponses Gpt41Nano { get; } = new ModelIdsResponses(Gpt41NanoValue); + + public static ModelIdsResponses Gpt4120250414 { get; } = new ModelIdsResponses(Gpt4120250414Value); + + public static ModelIdsResponses Gpt41Mini20250414 { get; } = new ModelIdsResponses(Gpt41Mini20250414Value); + + public static ModelIdsResponses Gpt41Nano20250414 { get; } = new ModelIdsResponses(Gpt41Nano20250414Value); + + public static ModelIdsResponses O4Mini { get; } = new ModelIdsResponses(O4MiniValue); + + public static ModelIdsResponses O4Mini20250416 { get; } = new ModelIdsResponses(O4Mini20250416Value); + + public static ModelIdsResponses O3 { get; } = new ModelIdsResponses(O3Value); + + public static ModelIdsResponses O320250416 { get; } = new ModelIdsResponses(O320250416Value); + + public static ModelIdsResponses O3Mini { get; } = new ModelIdsResponses(O3MiniValue); + + public static ModelIdsResponses O3Mini20250131 { get; } = new ModelIdsResponses(O3Mini20250131Value); + + public static ModelIdsResponses O1 { get; } = new ModelIdsResponses(O1Value); + + public static ModelIdsResponses O120241217 { get; } = new ModelIdsResponses(O120241217Value); + + public static ModelIdsResponses O1Preview { get; } = new ModelIdsResponses(O1PreviewValue); + + public static ModelIdsResponses O1Preview20240912 { get; } = new ModelIdsResponses(O1Preview20240912Value); + + public static ModelIdsResponses O1Mini { get; } = new ModelIdsResponses(O1MiniValue); + + public 
static ModelIdsResponses O1Mini20240912 { get; } = new ModelIdsResponses(O1Mini20240912Value); + + public static ModelIdsResponses Gpt4o { get; } = new ModelIdsResponses(Gpt4oValue); + + public static ModelIdsResponses Gpt4o20241120 { get; } = new ModelIdsResponses(Gpt4o20241120Value); + + public static ModelIdsResponses Gpt4o20240806 { get; } = new ModelIdsResponses(Gpt4o20240806Value); + + public static ModelIdsResponses Gpt4o20240513 { get; } = new ModelIdsResponses(Gpt4o20240513Value); + + public static ModelIdsResponses Gpt4oAudioPreview { get; } = new ModelIdsResponses(Gpt4oAudioPreviewValue); + + public static ModelIdsResponses Gpt4oAudioPreview20241001 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20241001Value); + + public static ModelIdsResponses Gpt4oAudioPreview20241217 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20241217Value); + + public static ModelIdsResponses Gpt4oAudioPreview20250603 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20250603Value); + + public static ModelIdsResponses Gpt4oMiniAudioPreview { get; } = new ModelIdsResponses(Gpt4oMiniAudioPreviewValue); + + public static ModelIdsResponses Gpt4oMiniAudioPreview20241217 { get; } = new ModelIdsResponses(Gpt4oMiniAudioPreview20241217Value); + + public static ModelIdsResponses Gpt4oSearchPreview { get; } = new ModelIdsResponses(Gpt4oSearchPreviewValue); + + public static ModelIdsResponses Gpt4oMiniSearchPreview { get; } = new ModelIdsResponses(Gpt4oMiniSearchPreviewValue); + + public static ModelIdsResponses Gpt4oSearchPreview20250311 { get; } = new ModelIdsResponses(Gpt4oSearchPreview20250311Value); + + public static ModelIdsResponses Gpt4oMiniSearchPreview20250311 { get; } = new ModelIdsResponses(Gpt4oMiniSearchPreview20250311Value); + + public static ModelIdsResponses Chatgpt4oLatest { get; } = new ModelIdsResponses(Chatgpt4oLatestValue); + + public static ModelIdsResponses CodexMiniLatest { get; } = new ModelIdsResponses(CodexMiniLatestValue); + + public static 
ModelIdsResponses Gpt4oMini { get; } = new ModelIdsResponses(Gpt4oMiniValue); + + public static ModelIdsResponses Gpt4oMini20240718 { get; } = new ModelIdsResponses(Gpt4oMini20240718Value); + + public static ModelIdsResponses Gpt4Turbo { get; } = new ModelIdsResponses(Gpt4TurboValue); + + public static ModelIdsResponses Gpt4Turbo20240409 { get; } = new ModelIdsResponses(Gpt4Turbo20240409Value); + + public static ModelIdsResponses Gpt40125Preview { get; } = new ModelIdsResponses(Gpt40125PreviewValue); + + public static ModelIdsResponses Gpt4TurboPreview { get; } = new ModelIdsResponses(Gpt4TurboPreviewValue); + + public static ModelIdsResponses Gpt41106Preview { get; } = new ModelIdsResponses(Gpt41106PreviewValue); + + public static ModelIdsResponses Gpt4VisionPreview { get; } = new ModelIdsResponses(Gpt4VisionPreviewValue); + + public static ModelIdsResponses Gpt4 { get; } = new ModelIdsResponses(Gpt4Value); + + public static ModelIdsResponses Gpt40314 { get; } = new ModelIdsResponses(Gpt40314Value); + + public static ModelIdsResponses Gpt40613 { get; } = new ModelIdsResponses(Gpt40613Value); + + public static ModelIdsResponses Gpt432k { get; } = new ModelIdsResponses(Gpt432kValue); + + public static ModelIdsResponses Gpt432k0314 { get; } = new ModelIdsResponses(Gpt432k0314Value); + + public static ModelIdsResponses Gpt432k0613 { get; } = new ModelIdsResponses(Gpt432k0613Value); + + public static ModelIdsResponses Gpt35Turbo { get; } = new ModelIdsResponses(Gpt35TurboValue); + + public static ModelIdsResponses Gpt35Turbo16k { get; } = new ModelIdsResponses(Gpt35Turbo16kValue); + + public static ModelIdsResponses Gpt35Turbo0301 { get; } = new ModelIdsResponses(Gpt35Turbo0301Value); + + public static ModelIdsResponses Gpt35Turbo0613 { get; } = new ModelIdsResponses(Gpt35Turbo0613Value); + + public static ModelIdsResponses Gpt35Turbo1106 { get; } = new ModelIdsResponses(Gpt35Turbo1106Value); + + public static ModelIdsResponses Gpt35Turbo0125 { get; } = new 
ModelIdsResponses(Gpt35Turbo0125Value); + + public static ModelIdsResponses Gpt35Turbo16k0613 { get; } = new ModelIdsResponses(Gpt35Turbo16k0613Value); + + public static ModelIdsResponses O1Pro { get; } = new ModelIdsResponses(O1ProValue); + + public static ModelIdsResponses O1Pro20250319 { get; } = new ModelIdsResponses(O1Pro20250319Value); + + public static ModelIdsResponses O3Pro { get; } = new ModelIdsResponses(O3ProValue); + + public static ModelIdsResponses O3Pro20250610 { get; } = new ModelIdsResponses(O3Pro20250610Value); + + public static ModelIdsResponses ComputerUsePreview { get; } = new ModelIdsResponses(ComputerUsePreviewValue); + + public static ModelIdsResponses ComputerUsePreview20250311 { get; } = new ModelIdsResponses(ComputerUsePreview20250311Value); + + public static bool operator ==(ModelIdsResponses left, ModelIdsResponses right) => left.Equals(right); + + public static bool operator !=(ModelIdsResponses left, ModelIdsResponses right) => !left.Equals(right); + + public static implicit operator ModelIdsResponses(string value) => new ModelIdsResponses(value); + + public static implicit operator ModelIdsResponses?(string value) => value == null ? null : new ModelIdsResponses(value); + + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ModelIdsResponses other && Equals(other); + + public bool Equals(ModelIdsResponses other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + + public override string ToString() => _value; + } +} diff --git a/src/Generated/Models/Responses/InternalModelIdsResponses.cs b/src/Generated/Models/Responses/InternalModelIdsResponses.cs deleted file mode 100644 index 8098a6cee..000000000 --- a/src/Generated/Models/Responses/InternalModelIdsResponses.cs +++ /dev/null @@ -1,223 +0,0 @@ -// - -#nullable disable - -using System; -using System.ComponentModel; -using OpenAI; - -namespace OpenAI.Responses -{ - internal readonly partial struct InternalModelIdsResponses : IEquatable - { - private readonly string _value; - private const string Gpt41Value = "gpt-4.1"; - private const string Gpt41MiniValue = "gpt-4.1-mini"; - private const string Gpt41NanoValue = "gpt-4.1-nano"; - private const string Gpt4120250414Value = "gpt-4.1-2025-04-14"; - private const string Gpt41Mini20250414Value = "gpt-4.1-mini-2025-04-14"; - private const string Gpt41Nano20250414Value = "gpt-4.1-nano-2025-04-14"; - private const string O4MiniValue = "o4-mini"; - private const string O4Mini20250416Value = "o4-mini-2025-04-16"; - private const string O3Value = "o3"; - private const string O320250416Value = "o3-2025-04-16"; - private const string O3MiniValue = "o3-mini"; - private const string O3Mini20250131Value = "o3-mini-2025-01-31"; - private const string O1Value = "o1"; - private const string O120241217Value = "o1-2024-12-17"; - private const string O1PreviewValue = "o1-preview"; - private const string O1Preview20240912Value = "o1-preview-2024-09-12"; - private const string O1MiniValue = "o1-mini"; - private const string O1Mini20240912Value = "o1-mini-2024-09-12"; - private const string Gpt4oValue = "gpt-4o"; - private const string Gpt4o20241120Value = "gpt-4o-2024-11-20"; - private const string Gpt4o20240806Value = "gpt-4o-2024-08-06"; - private const string Gpt4o20240513Value = "gpt-4o-2024-05-13"; - private const string Gpt4oAudioPreviewValue = "gpt-4o-audio-preview"; - 
private const string Gpt4oAudioPreview20241001Value = "gpt-4o-audio-preview-2024-10-01"; - private const string Gpt4oAudioPreview20241217Value = "gpt-4o-audio-preview-2024-12-17"; - private const string Gpt4oAudioPreview20250603Value = "gpt-4o-audio-preview-2025-06-03"; - private const string Gpt4oMiniAudioPreviewValue = "gpt-4o-mini-audio-preview"; - private const string Gpt4oMiniAudioPreview20241217Value = "gpt-4o-mini-audio-preview-2024-12-17"; - private const string Gpt4oSearchPreviewValue = "gpt-4o-search-preview"; - private const string Gpt4oMiniSearchPreviewValue = "gpt-4o-mini-search-preview"; - private const string Gpt4oSearchPreview20250311Value = "gpt-4o-search-preview-2025-03-11"; - private const string Gpt4oMiniSearchPreview20250311Value = "gpt-4o-mini-search-preview-2025-03-11"; - private const string Chatgpt4oLatestValue = "chatgpt-4o-latest"; - private const string CodexMiniLatestValue = "codex-mini-latest"; - private const string Gpt4oMiniValue = "gpt-4o-mini"; - private const string Gpt4oMini20240718Value = "gpt-4o-mini-2024-07-18"; - private const string Gpt4TurboValue = "gpt-4-turbo"; - private const string Gpt4Turbo20240409Value = "gpt-4-turbo-2024-04-09"; - private const string Gpt40125PreviewValue = "gpt-4-0125-preview"; - private const string Gpt4TurboPreviewValue = "gpt-4-turbo-preview"; - private const string Gpt41106PreviewValue = "gpt-4-1106-preview"; - private const string Gpt4VisionPreviewValue = "gpt-4-vision-preview"; - private const string Gpt4Value = "gpt-4"; - private const string Gpt40314Value = "gpt-4-0314"; - private const string Gpt40613Value = "gpt-4-0613"; - private const string Gpt432kValue = "gpt-4-32k"; - private const string Gpt432k0314Value = "gpt-4-32k-0314"; - private const string Gpt432k0613Value = "gpt-4-32k-0613"; - private const string Gpt35TurboValue = "gpt-3.5-turbo"; - private const string Gpt35Turbo16kValue = "gpt-3.5-turbo-16k"; - private const string Gpt35Turbo0301Value = "gpt-3.5-turbo-0301"; - private 
const string Gpt35Turbo0613Value = "gpt-3.5-turbo-0613"; - private const string Gpt35Turbo1106Value = "gpt-3.5-turbo-1106"; - private const string Gpt35Turbo0125Value = "gpt-3.5-turbo-0125"; - private const string Gpt35Turbo16k0613Value = "gpt-3.5-turbo-16k-0613"; - private const string O1ProValue = "o1-pro"; - private const string O1Pro20250319Value = "o1-pro-2025-03-19"; - private const string O3ProValue = "o3-pro"; - private const string O3Pro20250610Value = "o3-pro-2025-06-10"; - private const string ComputerUsePreviewValue = "computer-use-preview"; - private const string ComputerUsePreview20250311Value = "computer-use-preview-2025-03-11"; - - public InternalModelIdsResponses(string value) - { - Argument.AssertNotNull(value, nameof(value)); - - _value = value; - } - - internal static InternalModelIdsResponses Gpt41 { get; } = new InternalModelIdsResponses(Gpt41Value); - - internal static InternalModelIdsResponses Gpt41Mini { get; } = new InternalModelIdsResponses(Gpt41MiniValue); - - internal static InternalModelIdsResponses Gpt41Nano { get; } = new InternalModelIdsResponses(Gpt41NanoValue); - - internal static InternalModelIdsResponses Gpt4120250414 { get; } = new InternalModelIdsResponses(Gpt4120250414Value); - - internal static InternalModelIdsResponses Gpt41Mini20250414 { get; } = new InternalModelIdsResponses(Gpt41Mini20250414Value); - - internal static InternalModelIdsResponses Gpt41Nano20250414 { get; } = new InternalModelIdsResponses(Gpt41Nano20250414Value); - - internal static InternalModelIdsResponses O4Mini { get; } = new InternalModelIdsResponses(O4MiniValue); - - internal static InternalModelIdsResponses O4Mini20250416 { get; } = new InternalModelIdsResponses(O4Mini20250416Value); - - internal static InternalModelIdsResponses O3 { get; } = new InternalModelIdsResponses(O3Value); - - internal static InternalModelIdsResponses O320250416 { get; } = new InternalModelIdsResponses(O320250416Value); - - internal static InternalModelIdsResponses O3Mini { 
get; } = new InternalModelIdsResponses(O3MiniValue); - - internal static InternalModelIdsResponses O3Mini20250131 { get; } = new InternalModelIdsResponses(O3Mini20250131Value); - - internal static InternalModelIdsResponses O1 { get; } = new InternalModelIdsResponses(O1Value); - - internal static InternalModelIdsResponses O120241217 { get; } = new InternalModelIdsResponses(O120241217Value); - - internal static InternalModelIdsResponses O1Preview { get; } = new InternalModelIdsResponses(O1PreviewValue); - - internal static InternalModelIdsResponses O1Preview20240912 { get; } = new InternalModelIdsResponses(O1Preview20240912Value); - - internal static InternalModelIdsResponses O1Mini { get; } = new InternalModelIdsResponses(O1MiniValue); - - internal static InternalModelIdsResponses O1Mini20240912 { get; } = new InternalModelIdsResponses(O1Mini20240912Value); - - internal static InternalModelIdsResponses Gpt4o { get; } = new InternalModelIdsResponses(Gpt4oValue); - - internal static InternalModelIdsResponses Gpt4o20241120 { get; } = new InternalModelIdsResponses(Gpt4o20241120Value); - - internal static InternalModelIdsResponses Gpt4o20240806 { get; } = new InternalModelIdsResponses(Gpt4o20240806Value); - - internal static InternalModelIdsResponses Gpt4o20240513 { get; } = new InternalModelIdsResponses(Gpt4o20240513Value); - - internal static InternalModelIdsResponses Gpt4oAudioPreview { get; } = new InternalModelIdsResponses(Gpt4oAudioPreviewValue); - - internal static InternalModelIdsResponses Gpt4oAudioPreview20241001 { get; } = new InternalModelIdsResponses(Gpt4oAudioPreview20241001Value); - - internal static InternalModelIdsResponses Gpt4oAudioPreview20241217 { get; } = new InternalModelIdsResponses(Gpt4oAudioPreview20241217Value); - - internal static InternalModelIdsResponses Gpt4oAudioPreview20250603 { get; } = new InternalModelIdsResponses(Gpt4oAudioPreview20250603Value); - - internal static InternalModelIdsResponses Gpt4oMiniAudioPreview { get; } = new 
InternalModelIdsResponses(Gpt4oMiniAudioPreviewValue); - - internal static InternalModelIdsResponses Gpt4oMiniAudioPreview20241217 { get; } = new InternalModelIdsResponses(Gpt4oMiniAudioPreview20241217Value); - - internal static InternalModelIdsResponses Gpt4oSearchPreview { get; } = new InternalModelIdsResponses(Gpt4oSearchPreviewValue); - - internal static InternalModelIdsResponses Gpt4oMiniSearchPreview { get; } = new InternalModelIdsResponses(Gpt4oMiniSearchPreviewValue); - - internal static InternalModelIdsResponses Gpt4oSearchPreview20250311 { get; } = new InternalModelIdsResponses(Gpt4oSearchPreview20250311Value); - - internal static InternalModelIdsResponses Gpt4oMiniSearchPreview20250311 { get; } = new InternalModelIdsResponses(Gpt4oMiniSearchPreview20250311Value); - - internal static InternalModelIdsResponses Chatgpt4oLatest { get; } = new InternalModelIdsResponses(Chatgpt4oLatestValue); - - internal static InternalModelIdsResponses CodexMiniLatest { get; } = new InternalModelIdsResponses(CodexMiniLatestValue); - - internal static InternalModelIdsResponses Gpt4oMini { get; } = new InternalModelIdsResponses(Gpt4oMiniValue); - - internal static InternalModelIdsResponses Gpt4oMini20240718 { get; } = new InternalModelIdsResponses(Gpt4oMini20240718Value); - - internal static InternalModelIdsResponses Gpt4Turbo { get; } = new InternalModelIdsResponses(Gpt4TurboValue); - - internal static InternalModelIdsResponses Gpt4Turbo20240409 { get; } = new InternalModelIdsResponses(Gpt4Turbo20240409Value); - - internal static InternalModelIdsResponses Gpt40125Preview { get; } = new InternalModelIdsResponses(Gpt40125PreviewValue); - - internal static InternalModelIdsResponses Gpt4TurboPreview { get; } = new InternalModelIdsResponses(Gpt4TurboPreviewValue); - - internal static InternalModelIdsResponses Gpt41106Preview { get; } = new InternalModelIdsResponses(Gpt41106PreviewValue); - - internal static InternalModelIdsResponses Gpt4VisionPreview { get; } = new 
InternalModelIdsResponses(Gpt4VisionPreviewValue); - - internal static InternalModelIdsResponses Gpt4 { get; } = new InternalModelIdsResponses(Gpt4Value); - - internal static InternalModelIdsResponses Gpt40314 { get; } = new InternalModelIdsResponses(Gpt40314Value); - - internal static InternalModelIdsResponses Gpt40613 { get; } = new InternalModelIdsResponses(Gpt40613Value); - - internal static InternalModelIdsResponses Gpt432k { get; } = new InternalModelIdsResponses(Gpt432kValue); - - internal static InternalModelIdsResponses Gpt432k0314 { get; } = new InternalModelIdsResponses(Gpt432k0314Value); - - internal static InternalModelIdsResponses Gpt432k0613 { get; } = new InternalModelIdsResponses(Gpt432k0613Value); - - internal static InternalModelIdsResponses Gpt35Turbo { get; } = new InternalModelIdsResponses(Gpt35TurboValue); - - internal static InternalModelIdsResponses Gpt35Turbo16k { get; } = new InternalModelIdsResponses(Gpt35Turbo16kValue); - - internal static InternalModelIdsResponses Gpt35Turbo0301 { get; } = new InternalModelIdsResponses(Gpt35Turbo0301Value); - - internal static InternalModelIdsResponses Gpt35Turbo0613 { get; } = new InternalModelIdsResponses(Gpt35Turbo0613Value); - - internal static InternalModelIdsResponses Gpt35Turbo1106 { get; } = new InternalModelIdsResponses(Gpt35Turbo1106Value); - - internal static InternalModelIdsResponses Gpt35Turbo0125 { get; } = new InternalModelIdsResponses(Gpt35Turbo0125Value); - - internal static InternalModelIdsResponses Gpt35Turbo16k0613 { get; } = new InternalModelIdsResponses(Gpt35Turbo16k0613Value); - - internal static InternalModelIdsResponses O1Pro { get; } = new InternalModelIdsResponses(O1ProValue); - - internal static InternalModelIdsResponses O1Pro20250319 { get; } = new InternalModelIdsResponses(O1Pro20250319Value); - - internal static InternalModelIdsResponses O3Pro { get; } = new InternalModelIdsResponses(O3ProValue); - - internal static InternalModelIdsResponses O3Pro20250610 { get; } = new 
InternalModelIdsResponses(O3Pro20250610Value); - - internal static InternalModelIdsResponses ComputerUsePreview { get; } = new InternalModelIdsResponses(ComputerUsePreviewValue); - - internal static InternalModelIdsResponses ComputerUsePreview20250311 { get; } = new InternalModelIdsResponses(ComputerUsePreview20250311Value); - - public static bool operator ==(InternalModelIdsResponses left, InternalModelIdsResponses right) => left.Equals(right); - - public static bool operator !=(InternalModelIdsResponses left, InternalModelIdsResponses right) => !left.Equals(right); - - public static implicit operator InternalModelIdsResponses(string value) => new InternalModelIdsResponses(value); - - public static implicit operator InternalModelIdsResponses?(string value) => value == null ? null : new InternalModelIdsResponses(value); - - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) => obj is InternalModelIdsResponses other && Equals(other); - - public bool Equals(InternalModelIdsResponses other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); - - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; - - public override string ToString() => _value; - } -} diff --git a/src/Generated/Models/Responses/OpenAIResponse.cs b/src/Generated/Models/Responses/OpenAIResponse.cs index 4072bbc80..993d7c2fc 100644 --- a/src/Generated/Models/Responses/OpenAIResponse.cs +++ b/src/Generated/Models/Responses/OpenAIResponse.cs @@ -12,7 +12,6 @@ namespace OpenAI.Responses { - [Experimental("OPENAI001")] internal partial class OpenAIResponse { [Experimental("SCME0001")] diff --git a/src/Generated/Models/Responses/ResponseCreationOptions.cs b/src/Generated/Models/Responses/ResponseCreationOptions.cs index bd78e5add..66b24ff9c 100644 --- a/src/Generated/Models/Responses/ResponseCreationOptions.cs +++ b/src/Generated/Models/Responses/ResponseCreationOptions.cs @@ -10,7 +10,6 @@ namespace OpenAI.Responses { - [Experimental("OPENAI001")] internal partial class ResponseCreationOptions { [Experimental("SCME0001")] diff --git a/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.Serialization.cs b/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.Serialization.cs new file mode 100644 index 000000000..cad5a5c2f --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.Serialization.cs @@ -0,0 +1,12 @@ +// + +#nullable disable + +using System.ClientModel.Primitives; + +namespace OpenAI.Responses +{ + public partial class StreamingResponseCompletedUpdate : StreamingResponseUpdate, IJsonModel + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.cs b/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.cs new file mode 100644 index 000000000..0ef39651b --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseCompletedUpdate.cs @@ -0,0 +1,13 @@ +// + +#nullable disable + +using System.Diagnostics.CodeAnalysis; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class 
StreamingResponseCompletedUpdate : StreamingResponseUpdate + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.Serialization.cs b/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.Serialization.cs new file mode 100644 index 000000000..f67b41eb5 --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.Serialization.cs @@ -0,0 +1,12 @@ +// + +#nullable disable + +using System.ClientModel.Primitives; + +namespace OpenAI.Responses +{ + public partial class StreamingResponseCreatedUpdate : StreamingResponseUpdate, IJsonModel + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.cs b/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.cs new file mode 100644 index 000000000..f9675cd40 --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseCreatedUpdate.cs @@ -0,0 +1,13 @@ +// + +#nullable disable + +using System.Diagnostics.CodeAnalysis; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class StreamingResponseCreatedUpdate : StreamingResponseUpdate + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseFailedUpdate.Serialization.cs b/src/Generated/Models/Responses/StreamingResponseFailedUpdate.Serialization.cs new file mode 100644 index 000000000..7e49e88ec --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseFailedUpdate.Serialization.cs @@ -0,0 +1,12 @@ +// + +#nullable disable + +using System.ClientModel.Primitives; + +namespace OpenAI.Responses +{ + public partial class StreamingResponseFailedUpdate : StreamingResponseUpdate, IJsonModel + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseFailedUpdate.cs b/src/Generated/Models/Responses/StreamingResponseFailedUpdate.cs new file mode 100644 index 000000000..4b2ea709c --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseFailedUpdate.cs @@ -0,0 +1,13 @@ +// + +#nullable disable + +using 
System.Diagnostics.CodeAnalysis; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class StreamingResponseFailedUpdate : StreamingResponseUpdate + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.Serialization.cs b/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.Serialization.cs new file mode 100644 index 000000000..579921fd8 --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.Serialization.cs @@ -0,0 +1,12 @@ +// + +#nullable disable + +using System.ClientModel.Primitives; + +namespace OpenAI.Responses +{ + public partial class StreamingResponseInProgressUpdate : StreamingResponseUpdate, IJsonModel + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.cs b/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.cs new file mode 100644 index 000000000..81b6fda11 --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseInProgressUpdate.cs @@ -0,0 +1,13 @@ +// + +#nullable disable + +using System.Diagnostics.CodeAnalysis; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class StreamingResponseInProgressUpdate : StreamingResponseUpdate + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.Serialization.cs b/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.Serialization.cs new file mode 100644 index 000000000..245de4afa --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.Serialization.cs @@ -0,0 +1,12 @@ +// + +#nullable disable + +using System.ClientModel.Primitives; + +namespace OpenAI.Responses +{ + public partial class StreamingResponseIncompleteUpdate : StreamingResponseUpdate, IJsonModel + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.cs b/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.cs new file mode 100644 index 
000000000..aea897843 --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseIncompleteUpdate.cs @@ -0,0 +1,13 @@ +// + +#nullable disable + +using System.Diagnostics.CodeAnalysis; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class StreamingResponseIncompleteUpdate : StreamingResponseUpdate + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.Serialization.cs b/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.Serialization.cs new file mode 100644 index 000000000..e0092fa15 --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.Serialization.cs @@ -0,0 +1,12 @@ +// + +#nullable disable + +using System.ClientModel.Primitives; + +namespace OpenAI.Responses +{ + public partial class StreamingResponseQueuedUpdate : StreamingResponseUpdate, IJsonModel + { + } +} diff --git a/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.cs b/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.cs new file mode 100644 index 000000000..487f0edca --- /dev/null +++ b/src/Generated/Models/Responses/StreamingResponseQueuedUpdate.cs @@ -0,0 +1,13 @@ +// + +#nullable disable + +using System.Diagnostics.CodeAnalysis; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class StreamingResponseQueuedUpdate : StreamingResponseUpdate + { + } +} diff --git a/src/Generated/OpenAIModelFactory.cs b/src/Generated/OpenAIModelFactory.cs index d5c162cc0..68a06a4b2 100644 --- a/src/Generated/OpenAIModelFactory.cs +++ b/src/Generated/OpenAIModelFactory.cs @@ -652,40 +652,6 @@ public static McpToolDefinition McpToolDefinition(string name = default, string return new McpToolDefinition(name, description, inputSchema, annotations, default); } - public static OpenAIResponse OpenAIResponse(IDictionary metadata = default, float? temperature = default, float? topP = default, string endUserId = default, ResponseServiceTier? 
serviceTier = default, string previousResponseId = default, string model = default, ResponseReasoningOptions reasoningOptions = default, bool? backgroundModeEnabled = default, int? maxOutputTokenCount = default, string instructions = default, ResponseTextOptions textOptions = default, IEnumerable tools = default, ResponseToolChoice toolChoice = default, ResponseTruncationMode? truncationMode = default, string id = default, ResponseStatus? status = default, DateTimeOffset createdAt = default, ResponseError error = default, ResponseIncompleteStatusDetails incompleteStatusDetails = default, IEnumerable outputItems = default, ResponseTokenUsage usage = default, bool parallelToolCallsEnabled = default) - { - metadata ??= new ChangeTrackingDictionary(); - tools ??= new ChangeTrackingList(); - outputItems ??= new ChangeTrackingList(); - - return new OpenAIResponse( - metadata, - temperature, - topP, - endUserId, - serviceTier, - previousResponseId, - model, - reasoningOptions, - backgroundModeEnabled, - maxOutputTokenCount, - instructions, - textOptions, - tools.ToList(), - toolChoice, - truncationMode, - id, - "response", - status, - createdAt, - error, - incompleteStatusDetails, - outputItems.ToList(), - usage, - parallelToolCallsEnabled, - default); - } - public static ResponseError ResponseError(ResponseErrorCode code = default, string message = default) { return new ResponseError(code, message, default); From c759c613afe6a7da6a54dfd6781d018894ddca65 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Tue, 11 Nov 2025 13:21:20 -0600 Subject: [PATCH 09/15] fb --- .../Responses/Example01_SimpleResponse.cs | 2 +- .../Example01_SimpleResponseAsync.cs | 2 +- .../Example02_SimpleResponseStreaming.cs | 2 +- .../Example02_SimpleResponseStreamingAsync.cs | 2 +- .../Responses/Example03_FunctionCalling.cs | 2 +- .../Example03_FunctionCallingAsync.cs | 2 +- .../Example04_FunctionCallingStreaming.cs | 2 +- ...Example04_FunctionCallingStreamingAsync.cs | 2 +- 
examples/Responses/Example05_RemoteMcp.cs | 2 +- .../Responses/Example05_RemoteMcpAsync.cs | 2 +- .../Example06_RemoteMcpAuthentication.cs | 2 +- .../Example06_RemoteMcpAuthenticationAsync.cs | 2 +- .../Example07_InputAdditionalProperties.cs | 2 +- ...xample07_InputAdditionalPropertiesAsync.cs | 2 +- .../Example08_OutputAdditionalProperties.cs | 2 +- ...ample08_OutputAdditionalPropertiesAsync.cs | 2 +- .../Example09_ModelOverridePerRequest.cs | 2 +- .../Example09_ModelOverridePerRequestAsync.cs | 2 +- .../Responses/Example10_CodeInterpreter.cs | 2 +- .../Example10_CodeInterpreterAsync.cs | 2 +- src/Custom/OpenAIClient.cs | 8 +-- src/Custom/Responses/CreateResponseOptions.cs | 2 +- src/Custom/Responses/ModelIdResponses.cs | 7 ++ .../OpenAIResponseClient.Protocol.cs | 2 +- src/Custom/Responses/OpenAIResponseClient.cs | 34 +++++----- .../ResponseItemList.Serialization.cs | 40 +++++------ src/Custom/Responses/ResponseItemList.cs | 6 +- .../{ => Responses}/ModelIdsResponses.cs | 3 +- src/Generated/OpenAIClient.cs | 10 +++ ...Client.cs => ResponseClient.RestClient.cs} | 2 +- ...nAIResponseClient.cs => ResponseClient.cs} | 14 ++-- ...esponseInputItemsAsyncCollectionResult.cs} | 6 +- ...onseInputItemsAsyncCollectionResultOfT.cs} | 6 +- ...tGetResponseInputItemsCollectionResult.cs} | 6 +- ...tResponseInputItemsCollectionResultOfT.cs} | 6 +- tests/Responses/ResponseStoreTests.cs | 18 ++--- tests/Responses/ResponsesTests.cs | 66 +++++++++---------- tests/Responses/ResponsesToolTests.cs | 38 +++++------ tests/Utility/TestHelpers.cs | 2 +- 39 files changed, 167 insertions(+), 149 deletions(-) create mode 100644 src/Custom/Responses/ModelIdResponses.cs rename src/Generated/Models/{ => Responses}/ModelIdsResponses.cs (99%) rename src/Generated/{OpenAIResponseClient.RestClient.cs => ResponseClient.RestClient.cs} (98%) rename src/Generated/{OpenAIResponseClient.cs => ResponseClient.cs} (90%) rename src/Generated/{OpenAIResponseClientGetResponseInputItemsAsyncCollectionResult.cs => 
ResponseClientGetResponseInputItemsAsyncCollectionResult.cs} (85%) rename src/Generated/{OpenAIResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs => ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs} (86%) rename src/Generated/{OpenAIResponseClientGetResponseInputItemsCollectionResult.cs => ResponseClientGetResponseInputItemsCollectionResult.cs} (85%) rename src/Generated/{OpenAIResponseClientGetResponseInputItemsCollectionResultOfT.cs => ResponseClientGetResponseInputItemsCollectionResultOfT.cs} (86%) diff --git a/examples/Responses/Example01_SimpleResponse.cs b/examples/Responses/Example01_SimpleResponse.cs index 03d72e94f..de2362bdb 100644 --- a/examples/Responses/Example01_SimpleResponse.cs +++ b/examples/Responses/Example01_SimpleResponse.cs @@ -13,7 +13,7 @@ public partial class ResponseExamples [Test] public void Example01_SimpleResponse() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = client.CreateResponse(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); diff --git a/examples/Responses/Example01_SimpleResponseAsync.cs b/examples/Responses/Example01_SimpleResponseAsync.cs index 5ef3532a6..0d1241a85 100644 --- a/examples/Responses/Example01_SimpleResponseAsync.cs +++ b/examples/Responses/Example01_SimpleResponseAsync.cs @@ -14,7 +14,7 @@ public partial class ResponseExamples [Test] public async Task Example01_SimpleResponseAsync() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = await client.CreateResponseAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); diff --git 
a/examples/Responses/Example02_SimpleResponseStreaming.cs b/examples/Responses/Example02_SimpleResponseStreaming.cs index 3c50039aa..9bf10d7d4 100644 --- a/examples/Responses/Example02_SimpleResponseStreaming.cs +++ b/examples/Responses/Example02_SimpleResponseStreaming.cs @@ -14,7 +14,7 @@ public partial class ResponseExamples [Test] public void Example02_SimpleResponseStreaming() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CollectionResult responseUpdates = client.CreateResponseStreaming(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); diff --git a/examples/Responses/Example02_SimpleResponseStreamingAsync.cs b/examples/Responses/Example02_SimpleResponseStreamingAsync.cs index 6cef0f978..77685f9dc 100644 --- a/examples/Responses/Example02_SimpleResponseStreamingAsync.cs +++ b/examples/Responses/Example02_SimpleResponseStreamingAsync.cs @@ -16,7 +16,7 @@ public partial class ResponseExamples [Test] public async Task Example02_SimpleResponseStreamingAsync() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); diff --git a/examples/Responses/Example03_FunctionCalling.cs b/examples/Responses/Example03_FunctionCalling.cs index 872625b49..64f86fbe2 100644 --- a/examples/Responses/Example03_FunctionCalling.cs +++ b/examples/Responses/Example03_FunctionCalling.cs @@ -62,7 +62,7 @@ private static string GetCurrentWeather(string location, string unit = "celsius" [Test] public void Example03_FunctionCalling() { - OpenAIResponseClient client = new("gpt-5", 
Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ diff --git a/examples/Responses/Example03_FunctionCallingAsync.cs b/examples/Responses/Example03_FunctionCallingAsync.cs index 19101ff66..1eb0a6786 100644 --- a/examples/Responses/Example03_FunctionCallingAsync.cs +++ b/examples/Responses/Example03_FunctionCallingAsync.cs @@ -19,7 +19,7 @@ public partial class ResponseExamples [Test] public async Task Example03_FunctionCallingAsync() { - OpenAIResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ diff --git a/examples/Responses/Example04_FunctionCallingStreaming.cs b/examples/Responses/Example04_FunctionCallingStreaming.cs index 72a109e8e..43f6f81aa 100644 --- a/examples/Responses/Example04_FunctionCallingStreaming.cs +++ b/examples/Responses/Example04_FunctionCallingStreaming.cs @@ -19,7 +19,7 @@ public partial class ResponseExamples [Test] public void Example04_FunctionCallingStreaming() { - OpenAIResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ diff --git a/examples/Responses/Example04_FunctionCallingStreamingAsync.cs b/examples/Responses/Example04_FunctionCallingStreamingAsync.cs index a8f6885b5..2153fb9f0 100644 --- a/examples/Responses/Example04_FunctionCallingStreamingAsync.cs +++ b/examples/Responses/Example04_FunctionCallingStreamingAsync.cs @@ -20,7 +20,7 @@ public partial class ResponseExamples [Test] public async Task Example04_FunctionCallingStreamingAsync() { - OpenAIResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new("gpt-5", 
Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ diff --git a/examples/Responses/Example05_RemoteMcp.cs b/examples/Responses/Example05_RemoteMcp.cs index aae2f9d94..2962168c5 100644 --- a/examples/Responses/Example05_RemoteMcp.cs +++ b/examples/Responses/Example05_RemoteMcp.cs @@ -26,7 +26,7 @@ public void Example05_RemoteMcp() } }; - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = client.CreateResponse(options); diff --git a/examples/Responses/Example05_RemoteMcpAsync.cs b/examples/Responses/Example05_RemoteMcpAsync.cs index 860d2b325..ab9f042f7 100644 --- a/examples/Responses/Example05_RemoteMcpAsync.cs +++ b/examples/Responses/Example05_RemoteMcpAsync.cs @@ -27,7 +27,7 @@ public async Task Example05_RemoteMcpAsync() } }; - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = await client.CreateResponseAsync(options); diff --git a/examples/Responses/Example06_RemoteMcpAuthentication.cs b/examples/Responses/Example06_RemoteMcpAuthentication.cs index 4dea86566..c6f5a6383 100644 --- a/examples/Responses/Example06_RemoteMcpAuthentication.cs +++ b/examples/Responses/Example06_RemoteMcpAuthentication.cs @@ -24,7 +24,7 @@ public void Example06_RemoteMcpAuthentication() } }; - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = client.CreateResponse(options); diff --git a/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs 
b/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs index f6f0c03db..486ee8843 100644 --- a/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs +++ b/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs @@ -26,7 +26,7 @@ public async Task Example06_RemoteMcpAuthenticationAsync() } }; - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = await client.CreateResponseAsync(options); diff --git a/examples/Responses/Example07_InputAdditionalProperties.cs b/examples/Responses/Example07_InputAdditionalProperties.cs index e7ed5986e..8757fff80 100644 --- a/examples/Responses/Example07_InputAdditionalProperties.cs +++ b/examples/Responses/Example07_InputAdditionalProperties.cs @@ -14,7 +14,7 @@ public partial class ResponseExamples [Test] public void Example07_InputAdditionalProperties() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on CreateResponseOptions in the request payload. 
diff --git a/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs b/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs index 35836b233..d8e948cb2 100644 --- a/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs +++ b/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs @@ -15,7 +15,7 @@ public partial class ResponseExamples [Test] public async Task Example07_InputAdditionalPropertiesAsync() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on CreateResponseOptions in the request payload. diff --git a/examples/Responses/Example08_OutputAdditionalProperties.cs b/examples/Responses/Example08_OutputAdditionalProperties.cs index 8e0e3427e..a4fce859e 100644 --- a/examples/Responses/Example08_OutputAdditionalProperties.cs +++ b/examples/Responses/Example08_OutputAdditionalProperties.cs @@ -15,7 +15,7 @@ public partial class ResponseExamples [Test] public void Example08_OutputAdditionalProperties() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf") diff --git a/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs b/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs index 97f7ab9aa..1486d5730 100644 --- a/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs +++ b/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs @@ -16,7 +16,7 @@ public partial class 
ResponseExamples [Test] public async Task Example08_OutputAdditionalPropertiesAsync() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf") diff --git a/examples/Responses/Example09_ModelOverridePerRequest.cs b/examples/Responses/Example09_ModelOverridePerRequest.cs index ff96692b6..18ac585ce 100644 --- a/examples/Responses/Example09_ModelOverridePerRequest.cs +++ b/examples/Responses/Example09_ModelOverridePerRequest.cs @@ -14,7 +14,7 @@ public partial class ResponseExamples [Test] public void Example09_ModelOverridePerRequest() { - OpenAIResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `model` that aren't exposed on CreateResponseOptions. diff --git a/examples/Responses/Example09_ModelOverridePerRequestAsync.cs b/examples/Responses/Example09_ModelOverridePerRequestAsync.cs index 05b7c892c..1062ea3af 100644 --- a/examples/Responses/Example09_ModelOverridePerRequestAsync.cs +++ b/examples/Responses/Example09_ModelOverridePerRequestAsync.cs @@ -15,7 +15,7 @@ public partial class ResponseExamples [Test] public async Task Example09_ModelOverridePerRequestAsync() { - OpenAIResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `model` that aren't exposed on CreateResponseOptions. 
diff --git a/examples/Responses/Example10_CodeInterpreter.cs b/examples/Responses/Example10_CodeInterpreter.cs index 489e5cf09..8a23d2f66 100644 --- a/examples/Responses/Example10_CodeInterpreter.cs +++ b/examples/Responses/Example10_CodeInterpreter.cs @@ -17,7 +17,7 @@ public partial class ResponseExamples [Test] public void Example10_CodeInterpreter() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CodeInterpreterToolContainer container = new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration()); CodeInterpreterTool codeInterpreterTool = new(container); diff --git a/examples/Responses/Example10_CodeInterpreterAsync.cs b/examples/Responses/Example10_CodeInterpreterAsync.cs index 8945a560e..48fb522b9 100644 --- a/examples/Responses/Example10_CodeInterpreterAsync.cs +++ b/examples/Responses/Example10_CodeInterpreterAsync.cs @@ -18,7 +18,7 @@ public partial class ResponseExamples [Test] public async Task Example10_CodeInterpreterAsync() { - OpenAIResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CodeInterpreterToolContainer container = new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration()); CodeInterpreterTool codeInterpreterTool = new(container); diff --git a/src/Custom/OpenAIClient.cs b/src/Custom/OpenAIClient.cs index 5e6a2723f..fd60ca4ee 100644 --- a/src/Custom/OpenAIClient.cs +++ b/src/Custom/OpenAIClient.cs @@ -332,16 +332,16 @@ protected internal OpenAIClient(ClientPipeline pipeline, OpenAIClientOptions opt public virtual RealtimeClient GetRealtimeClient() => new(_keyCredential, _options); /// - /// Gets a new instance of that reuses the client configuration 
details provided to + /// Gets a new instance of that reuses the client configuration details provided to /// the instance. /// /// - /// This method is functionally equivalent to using the constructor directly with + /// This method is functionally equivalent to using the constructor directly with /// the same configuration details. /// - /// A new . + /// A new . [Experimental("OPENAI001")] - public virtual OpenAIResponseClient GetOpenAIResponseClient(string model) => new(Pipeline, model, _options); + public virtual ResponseClient GetOpenAIResponseClient(string model) => new(Pipeline, model, _options); /// /// Gets a new instance of that reuses the client configuration details provided to diff --git a/src/Custom/Responses/CreateResponseOptions.cs b/src/Custom/Responses/CreateResponseOptions.cs index 76eaf1f14..dee2c3e46 100644 --- a/src/Custom/Responses/CreateResponseOptions.cs +++ b/src/Custom/Responses/CreateResponseOptions.cs @@ -95,7 +95,7 @@ internal CreateResponseOptions(IDictionary metadata, float? temp public bool? 
Stream { get; set; } - internal static CreateResponseOptions Create(IEnumerable inputItems, OpenAIResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false) + internal static CreateResponseOptions Create(IEnumerable inputItems, ResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false) { Argument.AssertNotNull(inputItems, nameof(inputItems)); options ??= new(); diff --git a/src/Custom/Responses/ModelIdResponses.cs b/src/Custom/Responses/ModelIdResponses.cs new file mode 100644 index 000000000..6221c481e --- /dev/null +++ b/src/Custom/Responses/ModelIdResponses.cs @@ -0,0 +1,7 @@ +namespace OpenAI.Responses; + +[CodeGenType("ModelIdsResponses")] +public readonly partial struct ModelIdsResponses +{ + +} \ No newline at end of file diff --git a/src/Custom/Responses/OpenAIResponseClient.Protocol.cs b/src/Custom/Responses/OpenAIResponseClient.Protocol.cs index fbc0684a9..ddd1a9499 100644 --- a/src/Custom/Responses/OpenAIResponseClient.Protocol.cs +++ b/src/Custom/Responses/OpenAIResponseClient.Protocol.cs @@ -11,7 +11,7 @@ namespace OpenAI.Responses; [CodeGenSuppress("CancelResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] [CodeGenSuppress("GetResponse", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] [CodeGenSuppress("GetResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] -public partial class OpenAIResponseClient +public partial class ResponseClient { public virtual async Task GetResponseAsync(string responseId, bool? stream, int? 
startingAfter, RequestOptions options) { diff --git a/src/Custom/Responses/OpenAIResponseClient.cs b/src/Custom/Responses/OpenAIResponseClient.cs index d3eb0af45..b49fccd67 100644 --- a/src/Custom/Responses/OpenAIResponseClient.cs +++ b/src/Custom/Responses/OpenAIResponseClient.cs @@ -23,17 +23,17 @@ namespace OpenAI.Responses; [CodeGenSuppress("CancelResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(CancellationToken))] [CodeGenSuppress("GetResponse", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(CancellationToken))] [CodeGenSuppress("GetResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(CancellationToken))] -public partial class OpenAIResponseClient +public partial class ResponseClient { private readonly string _model; // CUSTOM: Added as a convenience. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The API key to authenticate with the service. /// or is null. /// is an empty string, and was expected to be non-empty. - public OpenAIResponseClient(string model, string apiKey) : this(model, new ApiKeyCredential(apiKey), new OpenAIClientOptions()) + public ResponseClient(string model, string apiKey) : this(model, new ApiKeyCredential(apiKey), new OpenAIClientOptions()) { } @@ -41,12 +41,12 @@ public partial class OpenAIResponseClient // - Added `model` parameter. // - Used a custom pipeline. // - Demoted the endpoint parameter to be a property in the options class. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The to authenticate with the service. /// or is null. /// is an empty string, and was expected to be non-empty. 
- public OpenAIResponseClient(string model, ApiKeyCredential credential) : this(model, credential, new OpenAIClientOptions()) + public ResponseClient(string model, ApiKeyCredential credential) : this(model, credential, new OpenAIClientOptions()) { } @@ -54,34 +54,34 @@ public partial class OpenAIResponseClient // - Added `model` parameter. // - Used a custom pipeline. // - Demoted the endpoint parameter to be a property in the options class. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The to authenticate with the service. /// The options to configure the client. /// or is null. /// is an empty string, and was expected to be non-empty. - public OpenAIResponseClient(string model, ApiKeyCredential credential, OpenAIClientOptions options) : this(model, OpenAIClient.CreateApiKeyAuthenticationPolicy(credential), options) + public ResponseClient(string model, ApiKeyCredential credential, OpenAIClientOptions options) : this(model, OpenAIClient.CreateApiKeyAuthenticationPolicy(credential), options) { } // CUSTOM: Added as a convenience. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The authentication policy used to authenticate with the service. /// or is null. /// is an empty string, and was expected to be non-empty. - public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy) : this(model, authenticationPolicy, new OpenAIClientOptions()) + public ResponseClient(string model, AuthenticationPolicy authenticationPolicy) : this(model, authenticationPolicy, new OpenAIClientOptions()) { } // CUSTOM: Added as a convenience. - /// Initializes a new instance of . + /// Initializes a new instance of . 
/// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The authentication policy used to authenticate with the service. /// The options to configure the client. /// or is null. /// is an empty string, and was expected to be non-empty. - public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options) + public ResponseClient(string model, AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options) { Argument.AssertNotNullOrEmpty(model, nameof(model)); Argument.AssertNotNull(authenticationPolicy, nameof(authenticationPolicy)); @@ -97,13 +97,13 @@ public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPol // - Used a custom pipeline. // - Demoted the endpoint parameter to be a property in the options class. // - Made protected. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The HTTP pipeline to send and receive REST requests and responses. /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The options to configure the client. /// or is null. /// is an empty string, and was expected to be non-empty. 
- protected internal OpenAIResponseClient(ClientPipeline pipeline, string model, OpenAIClientOptions options) + protected internal ResponseClient(ClientPipeline pipeline, string model, OpenAIClientOptions options) { Argument.AssertNotNull(pipeline, nameof(pipeline)); options ??= new OpenAIClientOptions(); @@ -411,24 +411,24 @@ public virtual ClientResult CancelResponse(string responseId, Ca return ClientResult.FromValue(convenienceResult, protocolResult.GetRawResponse()); } - public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default) + public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default) { Argument.AssertNotNull(options, nameof(options)); Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); PipelineMessage message = CreateGetResponseInputItemsRequest(options.ResponseId, options.Limit, options.After, options.Order, options.Before, cancellationToken.ToRequestOptions()); ClientResult result = ClientResult.FromResponse(Pipeline.ProcessMessage(message, cancellationToken.ToRequestOptions())); - return ClientResult.FromValue((ResponseItemList)result, result.GetRawResponse()); + return ClientResult.FromValue((ResponseItemCollection)result, result.GetRawResponse()); } - public virtual async Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default) + public virtual async Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default) { Argument.AssertNotNull(options, nameof(options)); Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); PipelineMessage message = CreateGetResponseInputItemsRequest(options.ResponseId, options.Limit, options.After, options.Order, options.Before, cancellationToken.ToRequestOptions()); 
ClientResult result = ClientResult.FromResponse(await Pipeline.ProcessMessageAsync(message, cancellationToken.ToRequestOptions()).ConfigureAwait(false)); - return ClientResult.FromValue((ResponseItemList)result, result.GetRawResponse()); + return ClientResult.FromValue((ResponseItemCollection)result, result.GetRawResponse()); } internal virtual ResponseCreationOptions CreatePerCallOptions(ResponseCreationOptions userOptions, IEnumerable inputItems, bool stream = false) diff --git a/src/Custom/Responses/ResponseItemList.Serialization.cs b/src/Custom/Responses/ResponseItemList.Serialization.cs index cbf658b88..93dea10a2 100644 --- a/src/Custom/Responses/ResponseItemList.Serialization.cs +++ b/src/Custom/Responses/ResponseItemList.Serialization.cs @@ -7,13 +7,13 @@ namespace OpenAI.Responses { - public partial class ResponseItemList : IJsonModel + public partial class ResponseItemCollection : IJsonModel { - internal ResponseItemList() : this(null, null, default, null, null, default) + internal ResponseItemCollection() : this(null, null, default, null, null, default) { } - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) { #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. if (Patch.Contains("$"u8)) @@ -30,10 +30,10 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriter protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(ResponseItemList)} does not support writing '{format}' format."); + throw new FormatException($"The model {nameof(ResponseItemCollection)} does not support writing '{format}' format."); } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. if (!Patch.Contains("$.object"u8)) @@ -84,20 +84,20 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. } - ResponseItemList IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + ResponseItemCollection IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); - protected virtual ResponseItemList JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + protected virtual ResponseItemCollection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; if (format != "J") { - throw new FormatException($"The model {nameof(ResponseItemList)} does not support reading '{format}' format."); + throw new FormatException($"The model {nameof(ResponseItemCollection)} does not support reading '{format}' format."); } using JsonDocument document = JsonDocument.ParseValue(ref reader); return DeserializeResponseItemList(document.RootElement, null, options); } - internal static ResponseItemList DeserializeResponseItemList(JsonElement element, BinaryData data, ModelReaderWriterOptions options) + internal static ResponseItemCollection DeserializeResponseItemList(JsonElement element, BinaryData data, ModelReaderWriterOptions options) { if (element.ValueKind == JsonValueKind.Null) { @@ -145,7 +145,7 @@ internal static ResponseItemList DeserializeResponseItemList(JsonElement element } patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); } - return new ResponseItemList( + return new ResponseItemCollection( @object, data0, hasMore, @@ -154,25 +154,25 @@ internal static ResponseItemList DeserializeResponseItemList(JsonElement element patch); } - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": return ModelReaderWriter.Write(this, options, OpenAIContext.Default); default: - throw new FormatException($"The model {nameof(ResponseItemList)} does not support writing '{options.Format}' format."); + throw new FormatException($"The model {nameof(ResponseItemCollection)} does not support writing '{options.Format}' format."); } } - ResponseItemList IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + ResponseItemCollection IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); - protected virtual ResponseItemList PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + protected virtual ResponseItemCollection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; switch (format) { case "J": @@ -181,13 +181,13 @@ protected virtual ResponseItemList PersistableModelCreateCore(BinaryData data, M return DeserializeResponseItemList(document.RootElement, data, options); } default: - throw new FormatException($"The model {nameof(ResponseItemList)} does not support reading '{options.Format}' format."); + throw new FormatException($"The model {nameof(ResponseItemCollection)} does not support reading '{options.Format}' format."); } } - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; - public static explicit operator ResponseItemList(ClientResult result) + public static explicit operator ResponseItemCollection(ClientResult result) { using PipelineResponse response = result.GetRawResponse(); BinaryData data = response.Content; diff --git a/src/Custom/Responses/ResponseItemList.cs b/src/Custom/Responses/ResponseItemList.cs index ea1473162..c0ba8dda7 100644 --- a/src/Custom/Responses/ResponseItemList.cs +++ b/src/Custom/Responses/ResponseItemList.cs @@ -7,12 +7,12 @@ namespace OpenAI.Responses { [Experimental("OPENAI001")] - public partial class ResponseItemList + public partial class ResponseItemCollection { [Experimental("SCME0001")] private JsonPatch _patch; - internal ResponseItemList(IEnumerable data, bool hasMore, string firstId, string lastId) + internal ResponseItemCollection(IEnumerable data, bool hasMore, string firstId, string lastId) { Data = data.ToList(); HasMore = hasMore; @@ -21,7 +21,7 @@ internal ResponseItemList(IEnumerable data, bool hasMore, string f } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- internal ResponseItemList(string @object, IList data, bool hasMore, string firstId, string lastId, in JsonPatch patch) + internal ResponseItemCollection(string @object, IList data, bool hasMore, string firstId, string lastId, in JsonPatch patch) { // Plugin customization: ensure initialization of collections Object = @object; diff --git a/src/Generated/Models/ModelIdsResponses.cs b/src/Generated/Models/Responses/ModelIdsResponses.cs similarity index 99% rename from src/Generated/Models/ModelIdsResponses.cs rename to src/Generated/Models/Responses/ModelIdsResponses.cs index 649190868..9e8675afc 100644 --- a/src/Generated/Models/ModelIdsResponses.cs +++ b/src/Generated/Models/Responses/ModelIdsResponses.cs @@ -5,8 +5,9 @@ using System; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; +using OpenAI; -namespace OpenAI +namespace OpenAI.Responses { [Experimental("OPENAI001")] public readonly partial struct ModelIdsResponses : IEquatable diff --git a/src/Generated/OpenAIClient.cs b/src/Generated/OpenAIClient.cs index 0dd087439..39ed5d254 100644 --- a/src/Generated/OpenAIClient.cs +++ b/src/Generated/OpenAIClient.cs @@ -5,6 +5,9 @@ using System; using System.ClientModel; using System.ClientModel.Primitives; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using OpenAI.Responses; namespace OpenAI { @@ -14,11 +17,18 @@ public partial class OpenAIClient private readonly ApiKeyCredential _keyCredential; private const string AuthorizationHeader = "Authorization"; private const string AuthorizationApiKeyPrefix = "Bearer"; + private ResponseClient _cachedResponseClient; protected OpenAIClient() { } public ClientPipeline Pipeline { get; } + + [Experimental("OPENAI001")] + public virtual ResponseClient GetResponseClient() + { + return Volatile.Read(ref _cachedResponseClient) ?? Interlocked.CompareExchange(ref _cachedResponseClient, new ResponseClient(Pipeline, _endpoint), null) ?? 
_cachedResponseClient; + } } } diff --git a/src/Generated/OpenAIResponseClient.RestClient.cs b/src/Generated/ResponseClient.RestClient.cs similarity index 98% rename from src/Generated/OpenAIResponseClient.RestClient.cs rename to src/Generated/ResponseClient.RestClient.cs index 22b300298..0aed8c460 100644 --- a/src/Generated/OpenAIResponseClient.RestClient.cs +++ b/src/Generated/ResponseClient.RestClient.cs @@ -8,7 +8,7 @@ namespace OpenAI.Responses { - public partial class OpenAIResponseClient + public partial class ResponseClient { private static PipelineMessageClassifier _pipelineMessageClassifier200; diff --git a/src/Generated/OpenAIResponseClient.cs b/src/Generated/ResponseClient.cs similarity index 90% rename from src/Generated/OpenAIResponseClient.cs rename to src/Generated/ResponseClient.cs index 435f2c153..85247c3ea 100644 --- a/src/Generated/OpenAIResponseClient.cs +++ b/src/Generated/ResponseClient.cs @@ -13,15 +13,15 @@ namespace OpenAI.Responses { [Experimental("OPENAI001")] - public partial class OpenAIResponseClient + public partial class ResponseClient { private readonly Uri _endpoint; - protected OpenAIResponseClient() + protected ResponseClient() { } - internal OpenAIResponseClient(ClientPipeline pipeline, Uri endpoint) + internal ResponseClient(ClientPipeline pipeline, Uri endpoint) { _endpoint = endpoint; Pipeline = pipeline; @@ -81,7 +81,7 @@ public virtual CollectionResult GetResponseInputItems(string responseId, int? 
li { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - return new OpenAIResponseClientGetResponseInputItemsCollectionResult( + return new ResponseClientGetResponseInputItemsCollectionResult( this, responseId, limit, @@ -95,7 +95,7 @@ public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseI { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - return new OpenAIResponseClientGetResponseInputItemsAsyncCollectionResult( + return new ResponseClientGetResponseInputItemsAsyncCollectionResult( this, responseId, limit, @@ -109,7 +109,7 @@ public virtual CollectionResult GetResponseInputItems(string respo { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - return new OpenAIResponseClientGetResponseInputItemsCollectionResultOfT( + return new ResponseClientGetResponseInputItemsCollectionResultOfT( this, responseId, options?.PageSizeLimit, @@ -123,7 +123,7 @@ public virtual AsyncCollectionResult GetResponseInputItemsAsync(st { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - return new OpenAIResponseClientGetResponseInputItemsAsyncCollectionResultOfT( + return new ResponseClientGetResponseInputItemsAsyncCollectionResultOfT( this, responseId, options?.PageSizeLimit, diff --git a/src/Generated/OpenAIResponseClientGetResponseInputItemsAsyncCollectionResult.cs b/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs similarity index 85% rename from src/Generated/OpenAIResponseClientGetResponseInputItemsAsyncCollectionResult.cs rename to src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs index 94a91847a..c96580874 100644 --- a/src/Generated/OpenAIResponseClientGetResponseInputItemsAsyncCollectionResult.cs +++ b/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs @@ -10,9 +10,9 @@ namespace OpenAI.Responses { - internal partial class OpenAIResponseClientGetResponseInputItemsAsyncCollectionResult : AsyncCollectionResult + internal partial 
class ResponseClientGetResponseInputItemsAsyncCollectionResult : AsyncCollectionResult { - private readonly OpenAIResponseClient _client; + private readonly ResponseClient _client; private readonly string _responseId; private readonly int? _limit; private readonly string _order; @@ -20,7 +20,7 @@ internal partial class OpenAIResponseClientGetResponseInputItemsAsyncCollectionR private readonly string _before; private readonly RequestOptions _options; - public OpenAIResponseClientGetResponseInputItemsAsyncCollectionResult(OpenAIResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponseClientGetResponseInputItemsAsyncCollectionResult(ResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/OpenAIResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs b/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs similarity index 86% rename from src/Generated/OpenAIResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs rename to src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs index 9b5e7dd0d..09f90e9ec 100644 --- a/src/Generated/OpenAIResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs +++ b/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs @@ -11,9 +11,9 @@ namespace OpenAI.Responses { - internal partial class OpenAIResponseClientGetResponseInputItemsAsyncCollectionResultOfT : AsyncCollectionResult + internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResultOfT : AsyncCollectionResult { - private readonly OpenAIResponseClient _client; + private readonly ResponseClient _client; private readonly string _responseId; private readonly int? 
_limit; private readonly string _order; @@ -21,7 +21,7 @@ internal partial class OpenAIResponseClientGetResponseInputItemsAsyncCollectionR private readonly string _before; private readonly RequestOptions _options; - public OpenAIResponseClientGetResponseInputItemsAsyncCollectionResultOfT(OpenAIResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponseClientGetResponseInputItemsAsyncCollectionResultOfT(ResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/OpenAIResponseClientGetResponseInputItemsCollectionResult.cs b/src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs similarity index 85% rename from src/Generated/OpenAIResponseClientGetResponseInputItemsCollectionResult.cs rename to src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs index 2f65071e4..95e5a6f33 100644 --- a/src/Generated/OpenAIResponseClientGetResponseInputItemsCollectionResult.cs +++ b/src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs @@ -10,9 +10,9 @@ namespace OpenAI.Responses { - internal partial class OpenAIResponseClientGetResponseInputItemsCollectionResult : CollectionResult + internal partial class ResponseClientGetResponseInputItemsCollectionResult : CollectionResult { - private readonly OpenAIResponseClient _client; + private readonly ResponseClient _client; private readonly string _responseId; private readonly int? _limit; private readonly string _order; @@ -20,7 +20,7 @@ internal partial class OpenAIResponseClientGetResponseInputItemsCollectionResult private readonly string _before; private readonly RequestOptions _options; - public OpenAIResponseClientGetResponseInputItemsCollectionResult(OpenAIResponseClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) + public ResponseClientGetResponseInputItemsCollectionResult(ResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/OpenAIResponseClientGetResponseInputItemsCollectionResultOfT.cs b/src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs similarity index 86% rename from src/Generated/OpenAIResponseClientGetResponseInputItemsCollectionResultOfT.cs rename to src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs index 8aabb67fe..5a00d7c2b 100644 --- a/src/Generated/OpenAIResponseClientGetResponseInputItemsCollectionResultOfT.cs +++ b/src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs @@ -10,9 +10,9 @@ namespace OpenAI.Responses { - internal partial class OpenAIResponseClientGetResponseInputItemsCollectionResultOfT : CollectionResult + internal partial class ResponseClientGetResponseInputItemsCollectionResultOfT : CollectionResult { - private readonly OpenAIResponseClient _client; + private readonly ResponseClient _client; private readonly string _responseId; private readonly int? _limit; private readonly string _order; @@ -20,7 +20,7 @@ internal partial class OpenAIResponseClientGetResponseInputItemsCollectionResult private readonly string _before; private readonly RequestOptions _options; - public OpenAIResponseClientGetResponseInputItemsCollectionResultOfT(OpenAIResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponseClientGetResponseInputItemsCollectionResultOfT(ResponseClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/tests/Responses/ResponseStoreTests.cs b/tests/Responses/ResponseStoreTests.cs index 903a531f4..42fedcc98 100644 --- a/tests/Responses/ResponseStoreTests.cs +++ b/tests/Responses/ResponseStoreTests.cs @@ -23,7 +23,7 @@ public ResponseStoreTests(bool isAsync) : base(isAsync) [RecordedTest] public async Task GetInputItemsWithPagination() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); // Create a response with multiple input items List inputItems = new() @@ -60,7 +60,7 @@ public async Task GetInputItemsWithPagination() [RecordedTest] public async Task GetInputItemsWithMultiPartPagination() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); string filePath = Path.Join("Assets", "files_travis_favorite_food.pdf"); @@ -112,7 +112,7 @@ public async Task GetInputItemsWithMultiPartPagination() [RecordedTest] public async Task GetInputItemsWithAfterIdPagination() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); // Ensure multiple input items exist to paginate List inputItems = new() @@ -153,7 +153,7 @@ public async Task GetInputItemsWithAfterIdPagination() [RecordedTest] public async Task GetInputItemsWithOrderFiltering() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); // Create inputs in a defined sequence List inputItems = new() @@ -202,7 +202,7 @@ public async Task GetInputItemsWithOrderFiltering() [RecordedTest] public async Task GetInputItemsHandlesLargeLimits() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -227,7 +227,7 @@ public async Task GetInputItemsHandlesLargeLimits() [RecordedTest] public async Task 
GetInputItemsWithMinimalLimits() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -252,7 +252,7 @@ public async Task GetInputItemsWithMinimalLimits() [RecordedTest] public async Task GetInputItemsWithCancellationToken() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -288,7 +288,7 @@ public async Task GetInputItemsWithCancellationToken() [RecordedTest] public async Task GetInputItemsWithCombinedOptions() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -316,5 +316,5 @@ public async Task GetInputItemsWithCombinedOptions() Assert.That(items, Has.Count.GreaterThan(0)); } - private OpenAIResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); + private ResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); } \ No newline at end of file diff --git a/tests/Responses/ResponsesTests.cs b/tests/Responses/ResponsesTests.cs index 3fe4d2403..80bd2719f 100644 --- a/tests/Responses/ResponsesTests.cs +++ b/tests/Responses/ResponsesTests.cs @@ -75,7 +75,7 @@ private void Validate(T input) where T : class [RecordedTest] public async Task ComputerToolWithScreenshotRoundTrip() { - OpenAIResponseClient client = GetTestClient("computer-use-preview-2025-03-11"); + ResponseClient client = GetTestClient("computer-use-preview-2025-03-11"); ResponseTool computerTool = ResponseTool.CreateComputerTool(ComputerToolEnvironment.Windows, 1024, 768); CreateResponseOptions responseOptions = new( [ @@ -138,7 +138,7 @@ public async Task ComputerToolWithScreenshotRoundTrip() [RecordedTest] public async Task WebSearchCall() { - 
OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Searching the internet, what's the weather like in Seattle?")]) { @@ -165,7 +165,7 @@ public async Task WebSearchCall() [RecordedTest] public async Task WebSearchCallPreview() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new CreateResponseOptions([ResponseItem.CreateUserMessageItem("What was a positive news story from today?")]) { @@ -192,7 +192,7 @@ public async Task WebSearchCallPreview() [RecordedTest] public async Task WebSearchCallStreaming() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); const string message = "Searching the internet, what's the weather like in San Francisco?"; @@ -260,7 +260,7 @@ in client.CreateResponseStreamingAsync(responseOptions)) [RecordedTest] public async Task ResponseWithImageGenTool() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { @@ -298,7 +298,7 @@ public async Task ResponseWithImageGenTool() [RecordedTest] public async Task ImageGenToolStreaming() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); const string message = "Draw a gorgeous image of a river made of white owl feathers, snaking its way through a serene winter landscape"; @@ -381,7 +381,7 @@ in client.CreateResponseStreamingAsync(responseOptions)) [RecordedTest] public async Task ImageGenToolInputMaskWithImageBytes() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); string imageFilename = "images_dog_and_cat.png"; string 
imagePath = Path.Combine("Assets", imageFilename); @@ -417,7 +417,7 @@ public async Task ImageGenToolInputMaskWithImageBytes() [RecordedTest] public async Task ImageGenToolInputMaskWithImageUri() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { @@ -451,7 +451,7 @@ public async Task ImageGenToolInputMaskWithImageUri() [RecordedTest] public async Task ImageGenToolInputMaskWithFileId() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); @@ -502,7 +502,7 @@ public async Task ImageGenToolInputMaskWithFileId() [RecordedTest] public async Task StreamingResponses() { - OpenAIResponseClient client = GetTestClient("gpt-4o-mini"); // "computer-use-alpha"); + ResponseClient client = GetTestClient("gpt-4o-mini"); // "computer-use-alpha"); List inputItems = [ResponseItem.CreateUserMessageItem("Hello, world!")]; List deltaTextSegments = []; @@ -530,7 +530,7 @@ public async Task StreamingResponses() [RecordedTest] public async Task StreamingResponsesWithReasoningSummary() { - OpenAIResponseClient client = GetTestClient("o3-mini"); + ResponseClient client = GetTestClient("o3-mini"); List inputItems = [ResponseItem.CreateUserMessageItem("I’m visiting New York for 3 days and love food and art. 
What’s the best way to plan my trip?")]; CreateResponseOptions options = new(inputItems) @@ -592,7 +592,7 @@ public async Task StreamingResponsesWithReasoningSummary() [TestCase("computer-use-preview")] public async Task ResponsesHelloWorldWithTool(string model) { - OpenAIResponseClient client = GetTestClient(model); + ResponseClient client = GetTestClient(model); CreateResponseOptions options = new( [ @@ -639,7 +639,7 @@ public async Task ResponsesHelloWorldWithTool(string model) [RecordedTest] public async Task ResponsesWithReasoning() { - OpenAIResponseClient client = GetTestClient("o3-mini"); + ResponseClient client = GetTestClient("o3-mini"); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What's the best way to fold a burrito?")]) { @@ -681,7 +681,7 @@ public async Task ResponsesWithReasoning() [TestCase("gpt-4o-mini")] public async Task HelloWorldStreaming(string model) { - OpenAIResponseClient client = GetTestClient(model); + ResponseClient client = GetTestClient(model); ResponseContentPart contentPart = ResponseContentPart.CreateInputTextPart("Hello, responses!"); @@ -701,7 +701,7 @@ in client.CreateResponseStreamingAsync( [RecordedTest] public async Task CanDeleteResponse() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new([ResponseItem.CreateUserMessageItem("Hello, model!")])); @@ -722,7 +722,7 @@ async Task RetrieveThatResponseAsync() [RecordedTest] public async Task CanOptOutOfStorage() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new ([ResponseItem.CreateUserMessageItem("Hello, model!")]) @@ -737,7 +737,7 @@ public async Task CanOptOutOfStorage() [RecordedTest] public async Task ResponseServiceTierWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); 
MessageResponseItem message = ResponseItem.CreateUserMessageItem("Using a comprehensive evaluation of popular media in the 1970s and 1980s, what were the most common sci-fi themes?"); CreateResponseOptions options = new([message]) @@ -753,7 +753,7 @@ public async Task ResponseServiceTierWorks() [RecordedTest] public async Task OutputTextMethod() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new([ResponseItem.CreateUserMessageItem("Respond with only the word hello.")])); var outputText = response.GetOutputText(); @@ -782,7 +782,7 @@ public async Task OutputTextMethod() [RecordedTest] public async Task MessageHistoryWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -799,7 +799,7 @@ public async Task MessageHistoryWorks() [RecordedTest] public async Task ImageInputWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); string imagePath = Path.Join("Assets", "images_dog_and_cat.png"); BinaryData imageBytes = BinaryData.FromBytes(await File.ReadAllBytesAsync(imagePath)); @@ -820,7 +820,7 @@ public async Task ImageInputWorks() [RecordedTest] public async Task FileInputFromIdWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); string filePath = Path.Join("Assets", "files_travis_favorite_food.pdf"); @@ -849,7 +849,7 @@ public async Task FileInputFromIdWorks() [RecordedTest] public async Task FileInputFromBinaryWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); string filePath = Path.Join("Assets", "files_travis_favorite_food.pdf"); Stream fileStream = File.OpenRead(filePath); @@ -895,7 +895,7 @@ public async Task 
AllInstructionMethodsWork(ResponsesTestInstructionMethod instr options.Instructions = instructions; } - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response, Is.Not.Null); @@ -939,8 +939,8 @@ public async Task AllInstructionMethodsWork(ResponsesTestInstructionMethod instr [RecordedTest] public async Task TwoTurnCrossModel() { - OpenAIResponseClient client = GetTestClient("gpt-4o-mini"); - OpenAIResponseClient client2 = GetTestClient("o3-mini"); + ResponseClient client = GetTestClient("gpt-4o-mini"); + ResponseClient client2 = GetTestClient("o3-mini"); ResponseResult response = await client.CreateResponseAsync(new( @@ -957,7 +957,7 @@ public async Task TwoTurnCrossModel() [TestCase("computer-use-preview", Ignore = "Not yet supported with computer-use-preview")] public async Task StructuredOutputs(string modelName) { - OpenAIResponseClient client = GetTestClient(modelName); + ResponseClient client = GetTestClient(modelName); ResponseResult response = await client.CreateResponseAsync( new ([ResponseItem.CreateUserMessageItem("Write a JSON document with a list of five animals")]) @@ -1003,7 +1003,7 @@ public async Task StructuredOutputs(string modelName) [RecordedTest] public async Task FunctionCallWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) { @@ -1049,7 +1049,7 @@ public async Task FunctionCallWorks() [RecordedTest] public async Task FunctionCallStreamingWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) { @@ -1098,7 +1098,7 @@ public async Task FunctionCallStreamingWorks() 
[RecordedTest] public async Task MaxTokens() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Write three haikus about tropical fruit")]) @@ -1117,7 +1117,7 @@ public async Task MaxTokens() [RecordedTest] public async Task FunctionToolChoiceWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseToolChoice toolChoice = ResponseToolChoice.CreateFunctionChoice(s_GetWeatherAtLocationToolName); @@ -1144,7 +1144,7 @@ ResponseToolChoice toolChoice [RecordedTest] public async Task CanStreamBackgroundResponses() { - OpenAIResponseClient client = GetTestClient("gpt-4.1-mini"); + ResponseClient client = GetTestClient("gpt-4.1-mini"); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")]) { @@ -1205,7 +1205,7 @@ public async Task CanStreamBackgroundResponses() [RecordedTest] public async Task CanCancelBackgroundResponses() { - OpenAIResponseClient client = GetTestClient("gpt-4.1-mini"); + ResponseClient client = GetTestClient("gpt-4.1-mini"); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")]) { @@ -1248,5 +1248,5 @@ public async Task CanCancelBackgroundResponses() """), strictModeEnabled: false); - private OpenAIResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); + private ResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); } \ No newline at end of file diff --git a/tests/Responses/ResponsesToolTests.cs b/tests/Responses/ResponsesToolTests.cs index 4b641ada3..49bbb094f 100644 --- a/tests/Responses/ResponsesToolTests.cs +++ b/tests/Responses/ResponsesToolTests.cs @@ -45,7 +45,7 @@ public async Task MCPToolWorks() } }; - OpenAIResponseClient 
client = GetTestClient(overrideModel: "gpt-5"); + ResponseClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response.Output, Has.Count.GreaterThan(0)); @@ -97,7 +97,7 @@ public async Task MCPToolStreamingWorks() } }; - OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponseClient client = GetTestClient(overrideModel: "gpt-5"); AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(options); @@ -211,7 +211,7 @@ public async Task MCPToolNeverRequiresApproval(bool useGlobalPolicy) } }; - OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponseClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response.Output, Has.Count.GreaterThan(0)); @@ -252,7 +252,7 @@ public async Task MCPToolAlwaysRequiresApproval(bool useGlobalPolicy) } }; - OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponseClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response1 = await client.CreateResponseAsync(options); Assert.That(response1.Output, Has.Count.GreaterThan(0)); @@ -297,7 +297,7 @@ public async Task MCPToolWithAllowedTools() } }; - OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponseClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response.Output, Has.Count.GreaterThan(0)); @@ -338,7 +338,7 @@ public async Task MCPToolWithDisallowedTools() } }; - OpenAIResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponseClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response.Output, Has.Count.GreaterThan(0)); @@ -372,7 +372,7 @@ public async Task FileSearch() await Task.Delay(TimeSpan.FromSeconds(5)); } - 
OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new([ResponseItem.CreateUserMessageItem("Using the file search tool, what's Travis's favorite food?")]) @@ -407,7 +407,7 @@ public async Task FileSearch() [RecordedTest] public async Task CodeInterpreterToolWithoutFileIds() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration())); CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code.")]) @@ -437,7 +437,7 @@ public async Task CodeInterpreterToolWithoutFileIds() [RecordedTest] public async Task CodeInterpreterToolWithEmptyFileIds() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new(new AutomaticCodeInterpreterToolContainerConfiguration())); CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Generate a simple chart using matplotlib. 
Ensure you emit debug logging and include any resulting log file output.")]) @@ -469,7 +469,7 @@ public async Task CodeInterpreterToolWithEmptyFileIds() public async Task CodeInterpreterToolWithContainerIdFromContainerApi() { ContainerClient containerClient = GetProxiedOpenAIClient(TestScenario.Containers); - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); // Create a container first using the Containers API CreateContainerBody containerBody = new("test-container-for-code-interpreter"); @@ -524,7 +524,7 @@ public async Task CodeInterpreterToolWithContainerIdFromContainerApi() public async Task CodeInterpreterToolWithUploadedFileIds() { OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); // Create some test files to upload string csvContent = "name,age,city\nAlice,30,New York\nBob,25,Los Angeles\nCharlie,35,Chicago"; @@ -585,7 +585,7 @@ public async Task CodeInterpreterToolWithUploadedFileIds() [RecordedTest] public async Task CodeInterpreterToolStreaming() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(new AutomaticCodeInterpreterToolContainerConfiguration())); CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code and show me the code step by step.")]) @@ -619,7 +619,7 @@ in client.CreateResponseStreamingAsync(responseOptions)) public async Task CodeInterpreterToolStreamingWithFiles() { OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); // Create test CSV data string csvContent = "x,y\n1,2\n2,4\n3,6\n4,8\n5,10"; @@ -682,7 +682,7 @@ in 
client.CreateResponseStreamingAsync(responseOptions)) [RecordedTest] public async Task ImageGenToolWorks() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { @@ -719,7 +719,7 @@ public async Task ImageGenToolWorks() [RecordedTest] public async Task ImageGenToolStreaming() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); const string message = "Draw a gorgeous image of a river made of white owl feathers, snaking its way through a serene winter landscape"; @@ -802,7 +802,7 @@ in client.CreateResponseStreamingAsync(responseOptions)) [RecordedTest] public async Task ImageGenToolInputMaskWithImageBytes() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); string imageFilename = "images_dog_and_cat.png"; string imagePath = Path.Combine("Assets", imageFilename); @@ -837,7 +837,7 @@ public async Task ImageGenToolInputMaskWithImageBytes() [RecordedTest] public async Task ImageGenToolInputMaskWithImageUri() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { @@ -870,7 +870,7 @@ public async Task ImageGenToolInputMaskWithImageUri() [RecordedTest] public async Task ImageGenToolInputMaskWithFileId() { - OpenAIResponseClient client = GetTestClient(); + ResponseClient client = GetTestClient(); OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); @@ -987,5 +987,5 @@ private static void ValidateCodeInterpreterEvent(ref int inProgressCount, ref in } } - private OpenAIResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); + 
private ResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); } \ No newline at end of file diff --git a/tests/Utility/TestHelpers.cs b/tests/Utility/TestHelpers.cs index 01ac3b7f1..6f148e5cf 100644 --- a/tests/Utility/TestHelpers.cs +++ b/tests/Utility/TestHelpers.cs @@ -121,7 +121,7 @@ public static T GetTestClient( TestScenario.Realtime => new RealtimeClient(credential, options), #pragma warning restore #pragma warning disable OPENAI003 - TestScenario.Responses => new OpenAIResponseClient(model, credential, options), + TestScenario.Responses => new ResponseClient(model, credential, options), #pragma warning restore _ => throw new NotImplementedException(), }; From 52df8099c41ef7fe5ff206850cdae76f3ab6a3d2 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Wed, 12 Nov 2025 12:53:04 -0600 Subject: [PATCH 10/15] fb --- .../Responses/Example01_SimpleResponse.cs | 4 +- .../Example01_SimpleResponseAsync.cs | 4 +- .../Example02_SimpleResponseStreaming.cs | 2 +- .../Example02_SimpleResponseStreamingAsync.cs | 2 +- .../Responses/Example03_FunctionCalling.cs | 8 +- .../Example03_FunctionCallingAsync.cs | 8 +- .../Example04_FunctionCallingStreaming.cs | 2 +- ...Example04_FunctionCallingStreamingAsync.cs | 2 +- examples/Responses/Example05_RemoteMcp.cs | 4 +- .../Responses/Example05_RemoteMcpAsync.cs | 4 +- .../Example06_RemoteMcpAuthentication.cs | 4 +- .../Example06_RemoteMcpAuthenticationAsync.cs | 4 +- .../Example07_InputAdditionalProperties.cs | 4 +- ...xample07_InputAdditionalPropertiesAsync.cs | 4 +- .../Example08_OutputAdditionalProperties.cs | 4 +- ...ample08_OutputAdditionalPropertiesAsync.cs | 4 +- .../Example09_ModelOverridePerRequest.cs | 4 +- .../Example09_ModelOverridePerRequestAsync.cs | 4 +- .../Responses/Example10_CodeInterpreter.cs | 4 +- .../Example10_CodeInterpreterAsync.cs | 4 +- src/Custom/OpenAIClient.cs | 8 +- .../CreateResponseOptions.Serialization.cs | 44 ++-- 
src/Custom/Responses/CreateResponseOptions.cs | 98 +++++++-- .../Responses/ResponseResult.Serialization.cs | 52 ++--- src/Custom/Responses/ResponseResult.cs | 126 ++++++++--- ...rotocol.cs => ResponsesClient.Protocol.cs} | 2 +- ...AIResponseClient.cs => ResponsesClient.cs} | 28 +-- src/Generated/OpenAIClient.cs | 6 +- src/Generated/ResponseClient.RestClient.cs | 2 +- src/Generated/ResponseClient.cs | 6 +- ...ResponseInputItemsAsyncCollectionResult.cs | 4 +- ...ponseInputItemsAsyncCollectionResultOfT.cs | 4 +- ...ntGetResponseInputItemsCollectionResult.cs | 4 +- ...etResponseInputItemsCollectionResultOfT.cs | 4 +- tests/Responses/ResponseStoreTests.cs | 18 +- tests/Responses/ResponsesTests.cs | 204 +++++++++--------- tests/Responses/ResponsesToolTests.cs | 154 ++++++------- tests/Utility/TestHelpers.cs | 2 +- 38 files changed, 492 insertions(+), 354 deletions(-) rename src/Custom/Responses/{OpenAIResponseClient.Protocol.cs => ResponsesClient.Protocol.cs} (98%) rename src/Custom/Responses/{OpenAIResponseClient.cs => ResponsesClient.cs} (96%) diff --git a/examples/Responses/Example01_SimpleResponse.cs b/examples/Responses/Example01_SimpleResponse.cs index de2362bdb..c8bdc733c 100644 --- a/examples/Responses/Example01_SimpleResponse.cs +++ b/examples/Responses/Example01_SimpleResponse.cs @@ -13,11 +13,11 @@ public partial class ResponseExamples [Test] public void Example01_SimpleResponse() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = client.CreateResponse(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); } } diff --git a/examples/Responses/Example01_SimpleResponseAsync.cs b/examples/Responses/Example01_SimpleResponseAsync.cs 
index 0d1241a85..3331be231 100644 --- a/examples/Responses/Example01_SimpleResponseAsync.cs +++ b/examples/Responses/Example01_SimpleResponseAsync.cs @@ -14,11 +14,11 @@ public partial class ResponseExamples [Test] public async Task Example01_SimpleResponseAsync() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = await client.CreateResponseAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); } } diff --git a/examples/Responses/Example02_SimpleResponseStreaming.cs b/examples/Responses/Example02_SimpleResponseStreaming.cs index 9bf10d7d4..83b1306cd 100644 --- a/examples/Responses/Example02_SimpleResponseStreaming.cs +++ b/examples/Responses/Example02_SimpleResponseStreaming.cs @@ -14,7 +14,7 @@ public partial class ResponseExamples [Test] public void Example02_SimpleResponseStreaming() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CollectionResult responseUpdates = client.CreateResponseStreaming(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); diff --git a/examples/Responses/Example02_SimpleResponseStreamingAsync.cs b/examples/Responses/Example02_SimpleResponseStreamingAsync.cs index 77685f9dc..7a19f1f62 100644 --- a/examples/Responses/Example02_SimpleResponseStreamingAsync.cs +++ b/examples/Responses/Example02_SimpleResponseStreamingAsync.cs @@ -16,7 +16,7 @@ public partial class ResponseExamples [Test] public async Task Example02_SimpleResponseStreamingAsync() { - ResponseClient client = new(model: "gpt-5", apiKey: 
Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); diff --git a/examples/Responses/Example03_FunctionCalling.cs b/examples/Responses/Example03_FunctionCalling.cs index 64f86fbe2..af18b6314 100644 --- a/examples/Responses/Example03_FunctionCalling.cs +++ b/examples/Responses/Example03_FunctionCalling.cs @@ -62,7 +62,7 @@ private static string GetCurrentWeather(string location, string unit = "celsius" [Test] public void Example03_FunctionCalling() { - ResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ @@ -83,9 +83,9 @@ public void Example03_FunctionCalling() requiresAction = false; ResponseResult response = client.CreateResponse(options); - inputItems.AddRange(response.Output); + inputItems.AddRange(response.OutputItems); - foreach (ResponseItem outputItem in response.Output) + foreach (ResponseItem outputItem in response.OutputItems) { if (outputItem is FunctionCallResponseItem functionCall) { @@ -132,7 +132,7 @@ public void Example03_FunctionCalling() } } - PrintMessageItems(response.Output.OfType()); + PrintMessageItems(response.OutputItems.OfType()); } while (requiresAction); } diff --git a/examples/Responses/Example03_FunctionCallingAsync.cs b/examples/Responses/Example03_FunctionCallingAsync.cs index 1eb0a6786..1610746a3 100644 --- a/examples/Responses/Example03_FunctionCallingAsync.cs +++ b/examples/Responses/Example03_FunctionCallingAsync.cs @@ -19,7 +19,7 @@ public partial class ResponseExamples [Test] public async Task Example03_FunctionCallingAsync() { - ResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + 
ResponsesClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ @@ -40,9 +40,9 @@ public async Task Example03_FunctionCallingAsync() requiresAction = false; ResponseResult response = await client.CreateResponseAsync(options); - inputItems.AddRange(response.Output); + inputItems.AddRange(response.OutputItems); - foreach (ResponseItem outputItem in response.Output) + foreach (ResponseItem outputItem in response.OutputItems) { if (outputItem is FunctionCallResponseItem functionCall) { @@ -89,7 +89,7 @@ public async Task Example03_FunctionCallingAsync() } } - PrintMessageItems(response.Output.OfType()); + PrintMessageItems(response.OutputItems.OfType()); } while (requiresAction); } diff --git a/examples/Responses/Example04_FunctionCallingStreaming.cs b/examples/Responses/Example04_FunctionCallingStreaming.cs index 43f6f81aa..caad5d601 100644 --- a/examples/Responses/Example04_FunctionCallingStreaming.cs +++ b/examples/Responses/Example04_FunctionCallingStreaming.cs @@ -19,7 +19,7 @@ public partial class ResponseExamples [Test] public void Example04_FunctionCallingStreaming() { - ResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ diff --git a/examples/Responses/Example04_FunctionCallingStreamingAsync.cs b/examples/Responses/Example04_FunctionCallingStreamingAsync.cs index 2153fb9f0..e0db98188 100644 --- a/examples/Responses/Example04_FunctionCallingStreamingAsync.cs +++ b/examples/Responses/Example04_FunctionCallingStreamingAsync.cs @@ -20,7 +20,7 @@ public partial class ResponseExamples [Test] public async Task Example04_FunctionCallingStreamingAsync() { - ResponseClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ diff 
--git a/examples/Responses/Example05_RemoteMcp.cs b/examples/Responses/Example05_RemoteMcp.cs index 2962168c5..bd6a1430a 100644 --- a/examples/Responses/Example05_RemoteMcp.cs +++ b/examples/Responses/Example05_RemoteMcp.cs @@ -26,11 +26,11 @@ public void Example05_RemoteMcp() } }; - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = client.CreateResponse(options); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); } } diff --git a/examples/Responses/Example05_RemoteMcpAsync.cs b/examples/Responses/Example05_RemoteMcpAsync.cs index ab9f042f7..0d05d61c1 100644 --- a/examples/Responses/Example05_RemoteMcpAsync.cs +++ b/examples/Responses/Example05_RemoteMcpAsync.cs @@ -27,11 +27,11 @@ public async Task Example05_RemoteMcpAsync() } }; - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = await client.CreateResponseAsync(options); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); } } diff --git a/examples/Responses/Example06_RemoteMcpAuthentication.cs b/examples/Responses/Example06_RemoteMcpAuthentication.cs index c6f5a6383..b38d506bc 100644 --- a/examples/Responses/Example06_RemoteMcpAuthentication.cs +++ b/examples/Responses/Example06_RemoteMcpAuthentication.cs @@ -24,11 +24,11 @@ public void Example06_RemoteMcpAuthentication() } }; - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: 
Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = client.CreateResponse(options); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); } } diff --git a/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs b/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs index 486ee8843..66c5a533b 100644 --- a/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs +++ b/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs @@ -26,11 +26,11 @@ public async Task Example06_RemoteMcpAuthenticationAsync() } }; - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = await client.CreateResponseAsync(options); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); } } diff --git a/examples/Responses/Example07_InputAdditionalProperties.cs b/examples/Responses/Example07_InputAdditionalProperties.cs index 8757fff80..f2b494733 100644 --- a/examples/Responses/Example07_InputAdditionalProperties.cs +++ b/examples/Responses/Example07_InputAdditionalProperties.cs @@ -14,7 +14,7 @@ public partial class ResponseExamples [Test] public void Example07_InputAdditionalProperties() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on CreateResponseOptions in the request payload. 
@@ -27,7 +27,7 @@ public void Example07_InputAdditionalProperties() ResponseResult response = client.CreateResponse(options); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); // Read extra fields from the response via Patch. // The service returns fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on OpenAIResponse. diff --git a/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs b/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs index d8e948cb2..135f8b2ff 100644 --- a/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs +++ b/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs @@ -15,7 +15,7 @@ public partial class ResponseExamples [Test] public async Task Example07_InputAdditionalPropertiesAsync() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on CreateResponseOptions in the request payload. @@ -28,7 +28,7 @@ public async Task Example07_InputAdditionalPropertiesAsync() ResponseResult response = await client.CreateResponseAsync(options); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); // Read extra fields from the response via Patch. // The service returns fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on OpenAIResponse. 
diff --git a/examples/Responses/Example08_OutputAdditionalProperties.cs b/examples/Responses/Example08_OutputAdditionalProperties.cs index a4fce859e..5aa39003b 100644 --- a/examples/Responses/Example08_OutputAdditionalProperties.cs +++ b/examples/Responses/Example08_OutputAdditionalProperties.cs @@ -15,7 +15,7 @@ public partial class ResponseExamples [Test] public void Example08_OutputAdditionalProperties() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf") @@ -31,7 +31,7 @@ public void Example08_OutputAdditionalProperties() }; ResponseResult response = client.CreateResponse(options); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[1]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[1]; BinaryData bytes = imageGenResponse.ImageResultBytes; using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.png"); diff --git a/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs b/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs index 1486d5730..3508ecf4f 100644 --- a/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs +++ b/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs @@ -16,7 +16,7 @@ public partial class ResponseExamples [Test] public async Task Example08_OutputAdditionalPropertiesAsync() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Generate an 
image of gray tabby cat hugging an otter with an orange scarf") @@ -32,7 +32,7 @@ public async Task Example08_OutputAdditionalPropertiesAsync() }; ResponseResult response = await client.CreateResponseAsync(options); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[1]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[1]; BinaryData bytes = imageGenResponse.ImageResultBytes; using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.png"); diff --git a/examples/Responses/Example09_ModelOverridePerRequest.cs b/examples/Responses/Example09_ModelOverridePerRequest.cs index 18ac585ce..9c401b1c2 100644 --- a/examples/Responses/Example09_ModelOverridePerRequest.cs +++ b/examples/Responses/Example09_ModelOverridePerRequest.cs @@ -14,7 +14,7 @@ public partial class ResponseExamples [Test] public void Example09_ModelOverridePerRequest() { - ResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `model` that aren't exposed on CreateResponseOptions. 
@@ -27,7 +27,7 @@ public void Example09_ModelOverridePerRequest() ResponseResult response = client.CreateResponse(options); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}, [Mode]: {response.Model}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}, [Mode]: {response.Model}"); } } diff --git a/examples/Responses/Example09_ModelOverridePerRequestAsync.cs b/examples/Responses/Example09_ModelOverridePerRequestAsync.cs index 1062ea3af..b3155f1ab 100644 --- a/examples/Responses/Example09_ModelOverridePerRequestAsync.cs +++ b/examples/Responses/Example09_ModelOverridePerRequestAsync.cs @@ -15,7 +15,7 @@ public partial class ResponseExamples [Test] public async Task Example09_ModelOverridePerRequestAsync() { - ResponseClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `model` that aren't exposed on CreateResponseOptions. 
@@ -28,7 +28,7 @@ public async Task Example09_ModelOverridePerRequestAsync() ResponseResult response = await client.CreateResponseAsync(options); - Console.WriteLine($"[ASSISTANT]: {response.OutputText}, [Mode]: {response.Model}"); + Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}, [Mode]: {response.Model}"); } } diff --git a/examples/Responses/Example10_CodeInterpreter.cs b/examples/Responses/Example10_CodeInterpreter.cs index 8a23d2f66..f127b3bc4 100644 --- a/examples/Responses/Example10_CodeInterpreter.cs +++ b/examples/Responses/Example10_CodeInterpreter.cs @@ -17,7 +17,7 @@ public partial class ResponseExamples [Test] public void Example10_CodeInterpreter() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CodeInterpreterToolContainer container = new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration()); CodeInterpreterTool codeInterpreterTool = new(container); @@ -34,7 +34,7 @@ public void Example10_CodeInterpreter() ResponseResult response = client.CreateResponse(options); - MessageResponseItem message = response.Output + MessageResponseItem message = response.OutputItems .OfType() .FirstOrDefault(); diff --git a/examples/Responses/Example10_CodeInterpreterAsync.cs b/examples/Responses/Example10_CodeInterpreterAsync.cs index 48fb522b9..aad96a9f4 100644 --- a/examples/Responses/Example10_CodeInterpreterAsync.cs +++ b/examples/Responses/Example10_CodeInterpreterAsync.cs @@ -18,7 +18,7 @@ public partial class ResponseExamples [Test] public async Task Example10_CodeInterpreterAsync() { - ResponseClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CodeInterpreterToolContainer container = 
new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration()); CodeInterpreterTool codeInterpreterTool = new(container); @@ -31,7 +31,7 @@ public async Task Example10_CodeInterpreterAsync() ResponseResult response = await client.CreateResponseAsync(options); - MessageResponseItem message = response.Output + MessageResponseItem message = response.OutputItems .OfType() .FirstOrDefault(); diff --git a/src/Custom/OpenAIClient.cs b/src/Custom/OpenAIClient.cs index fd60ca4ee..90472bbf9 100644 --- a/src/Custom/OpenAIClient.cs +++ b/src/Custom/OpenAIClient.cs @@ -332,16 +332,16 @@ protected internal OpenAIClient(ClientPipeline pipeline, OpenAIClientOptions opt public virtual RealtimeClient GetRealtimeClient() => new(_keyCredential, _options); /// - /// Gets a new instance of that reuses the client configuration details provided to + /// Gets a new instance of that reuses the client configuration details provided to /// the instance. /// /// - /// This method is functionally equivalent to using the constructor directly with + /// This method is functionally equivalent to using the constructor directly with /// the same configuration details. /// - /// A new . + /// A new . 
[Experimental("OPENAI001")] - public virtual ResponseClient GetOpenAIResponseClient(string model) => new(Pipeline, model, _options); + public virtual ResponsesClient GetOpenAIResponseClient(string model) => new(Pipeline, model, _options); /// /// Gets a new instance of that reuses the client configuration details provided to diff --git a/src/Custom/Responses/CreateResponseOptions.Serialization.cs b/src/Custom/Responses/CreateResponseOptions.Serialization.cs index 5abd9dfa7..05b5f549b 100644 --- a/src/Custom/Responses/CreateResponseOptions.Serialization.cs +++ b/src/Custom/Responses/CreateResponseOptions.Serialization.cs @@ -77,10 +77,10 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit writer.WritePropertyName("top_p"u8); writer.WriteNumberValue(TopP.Value); } - if (Optional.IsDefined(User) && !Patch.Contains("$.user"u8)) + if (Optional.IsDefined(EndUserId) && !Patch.Contains("$.user"u8)) { writer.WritePropertyName("user"u8); - writer.WriteStringValue(User); + writer.WriteStringValue(EndUserId); } if (Optional.IsDefined(ServiceTier) && !Patch.Contains("$.service_tier"u8)) { @@ -97,30 +97,30 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit writer.WritePropertyName("model"u8); writer.WriteStringValue(Model.Value.ToString()); } - if (Optional.IsDefined(Reasoning) && !Patch.Contains("$.reasoning"u8)) + if (Optional.IsDefined(ReasoningOptions) && !Patch.Contains("$.reasoning"u8)) { writer.WritePropertyName("reasoning"u8); - writer.WriteObjectValue(Reasoning, options); + writer.WriteObjectValue(ReasoningOptions, options); } - if (Optional.IsDefined(Background) && !Patch.Contains("$.background"u8)) + if (Optional.IsDefined(IsBackgroundModeEnabled) && !Patch.Contains("$.background"u8)) { writer.WritePropertyName("background"u8); - writer.WriteBooleanValue(Background.Value); + writer.WriteBooleanValue(IsBackgroundModeEnabled.Value); } - if (Optional.IsDefined(MaxOutputTokens) && 
!Patch.Contains("$.max_output_tokens"u8)) + if (Optional.IsDefined(MaxOutputTokenCount) && !Patch.Contains("$.max_output_tokens"u8)) { writer.WritePropertyName("max_output_tokens"u8); - writer.WriteNumberValue(MaxOutputTokens.Value); + writer.WriteNumberValue(MaxOutputTokenCount.Value); } if (Optional.IsDefined(Instructions) && !Patch.Contains("$.instructions"u8)) { writer.WritePropertyName("instructions"u8); writer.WriteStringValue(Instructions); } - if (Optional.IsDefined(Text) && !Patch.Contains("$.text"u8)) + if (Optional.IsDefined(TextOptions) && !Patch.Contains("$.text"u8)) { writer.WritePropertyName("text"u8); - writer.WriteObjectValue(Text, options); + writer.WriteObjectValue(TextOptions, options); } if (Patch.Contains("$.tools"u8)) { @@ -150,10 +150,10 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit writer.WritePropertyName("tool_choice"u8); writer.WriteObjectValue(ToolChoice, options); } - if (Optional.IsDefined(Truncation) && !Patch.Contains("$.truncation"u8)) + if (Optional.IsDefined(TruncationMode) && !Patch.Contains("$.truncation"u8)) { writer.WritePropertyName("truncation"u8); - writer.WriteStringValue(Truncation.Value.ToString()); + writer.WriteStringValue(TruncationMode.Value.ToString()); } if (Patch.Contains("$.input"u8)) { @@ -201,20 +201,20 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit Patch.WriteTo(writer, "$.include"u8); writer.WriteEndArray(); } - if (Optional.IsDefined(ParallelToolCalls) && !Patch.Contains("$.parallel_tool_calls"u8)) + if (Optional.IsDefined(IsParallelToolCallsEnabled) && !Patch.Contains("$.parallel_tool_calls"u8)) { writer.WritePropertyName("parallel_tool_calls"u8); - writer.WriteBooleanValue(ParallelToolCalls.Value); + writer.WriteBooleanValue(IsParallelToolCallsEnabled.Value); } - if (Optional.IsDefined(Store) && !Patch.Contains("$.store"u8)) + if (Optional.IsDefined(IsStoredOutputEnabled) && !Patch.Contains("$.store"u8)) { 
writer.WritePropertyName("store"u8); - writer.WriteBooleanValue(Store.Value); + writer.WriteBooleanValue(IsStoredOutputEnabled.Value); } - if (Optional.IsDefined(Stream) && !Patch.Contains("$.stream"u8)) + if (Optional.IsDefined(IsStreamingEnabled) && !Patch.Contains("$.stream"u8)) { writer.WritePropertyName("stream"u8); - writer.WriteBooleanValue(Stream.Value); + writer.WriteBooleanValue(IsStreamingEnabled.Value); } Patch.WriteTo(writer); @@ -542,11 +542,11 @@ private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValu if (local.StartsWith("reasoning"u8)) { - return Reasoning.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); + return ReasoningOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); } if (local.StartsWith("text"u8)) { - return Text.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("text"u8.Length)], out value); + return TextOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("text"u8.Length)], out value); } if (local.StartsWith("tools"u8)) { @@ -569,12 +569,12 @@ private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue va if (local.StartsWith("reasoning"u8)) { - Reasoning.Patch.Set([.. "$"u8, .. local.Slice("reasoning"u8.Length)], value); + ReasoningOptions.Patch.Set([.. "$"u8, .. local.Slice("reasoning"u8.Length)], value); return true; } if (local.StartsWith("text"u8)) { - Text.Patch.Set([.. "$"u8, .. local.Slice("text"u8.Length)], value); + TextOptions.Patch.Set([.. "$"u8, .. local.Slice("text"u8.Length)], value); return true; } if (local.StartsWith("tools"u8)) diff --git a/src/Custom/Responses/CreateResponseOptions.cs b/src/Custom/Responses/CreateResponseOptions.cs index dee2c3e46..dcd001e50 100644 --- a/src/Custom/Responses/CreateResponseOptions.cs +++ b/src/Custom/Responses/CreateResponseOptions.cs @@ -29,23 +29,23 @@ internal CreateResponseOptions(IDictionary metadata, float? temp Metadata = metadata ?? 
new ChangeTrackingDictionary(); Temperature = temperature; TopP = topP; - User = user; + EndUserId = user; ServiceTier = serviceTier; PreviousResponseId = previousResponseId; Model = model; - Reasoning = reasoning; - Background = background; - MaxOutputTokens = maxOutputTokens; + ReasoningOptions = reasoning; + IsBackgroundModeEnabled = background; + MaxOutputTokenCount = maxOutputTokens; Instructions = instructions; - Text = text; + TextOptions = text; Tools = tools ?? new ChangeTrackingList(); ToolChoice = toolChoice; - Truncation = truncation; + TruncationMode = truncation; Input = input; Include = include ?? new ChangeTrackingList(); - ParallelToolCalls = parallelToolCalls; - Store = store; - Stream = stream; + IsParallelToolCallsEnabled = parallelToolCalls; + IsStoredOutputEnabled = store; + IsStreamingEnabled = stream; _patch = patch; _patch.SetPropagators(PropagateSet, PropagateGet); } @@ -55,47 +55,107 @@ internal CreateResponseOptions(IDictionary metadata, float? temp [Experimental("SCME0001")] public ref JsonPatch Patch => ref _patch; + /// + /// Gets a dictionary of custom metadata for the response. This corresponds to the "metadata" property in the JSON representation. + /// public IDictionary Metadata { get; } + /// + /// Gets or sets the sampling temperature to use, between 0 and 2. This corresponds to the "temperature" property in the JSON representation. + /// public float? Temperature { get; set; } + /// + /// Gets or sets the nucleus sampling parameter, between 0 and 1. This corresponds to the "top_p" property in the JSON representation. + /// public float? TopP { get; set; } - public string User { get; set; } + /// + /// Gets or sets a unique identifier representing the end-user. This corresponds to the "user" property in the JSON representation. + /// + public string EndUserId { get; set; } + /// + /// Gets or sets the service tier to be used for processing the request. This corresponds to the "service_tier" property in the JSON representation. 
+ /// public ResponseServiceTier? ServiceTier { get; set; } + /// + /// Gets or sets the ID of the response to continue from, enabling streaming responses. This corresponds to the "previous_response_id" property in the JSON representation. + /// public string PreviousResponseId { get; set; } + /// + /// Gets or sets the model to be used for generating the response. This corresponds to the "model" property in the JSON representation. + /// public ModelIdsResponses? Model { get; set; } - public ResponseReasoningOptions Reasoning { get; set; } + /// + /// Gets or sets the reasoning options for the response. This corresponds to the "reasoning" property in the JSON representation. + /// + public ResponseReasoningOptions ReasoningOptions { get; set; } - public bool? Background { get; set; } + /// + /// Gets or sets whether to run the response in background mode. This corresponds to the "background" property in the JSON representation. + /// + public bool? IsBackgroundModeEnabled { get; set; } - public int? MaxOutputTokens { get; set; } + /// + /// Gets or sets the maximum number of output tokens to generate. This corresponds to the "max_output_tokens" property in the JSON representation. + /// + public int? MaxOutputTokenCount { get; set; } + /// + /// Gets or sets the instructions to guide the response generation. This corresponds to the "instructions" property in the JSON representation. + /// public string Instructions { get; set; } - public ResponseTextOptions Text { get; set; } + /// + /// Gets or sets the text format options for the response. This corresponds to the "text" property in the JSON representation. + /// + public ResponseTextOptions TextOptions { get; set; } + /// + /// Gets a list of tools available to the response. This corresponds to the "tools" property in the JSON representation. + /// public IList Tools { get; } + /// + /// Gets or sets how tool calls should be selected during response generation. 
This corresponds to the "tool_choice" property in the JSON representation. + /// public ResponseToolChoice ToolChoice { get; set; } - public ResponseTruncationMode? Truncation { get; set; } + /// + /// Gets or sets the truncation mode for the response. This corresponds to the "truncation" property in the JSON representation. + /// + public ResponseTruncationMode? TruncationMode { get; set; } + /// + /// Gets or sets the input items to be processed for the response. This corresponds to the "input" property in the JSON representation. + /// public IList Input { get; internal set; } + /// + /// Gets or sets the list of fields to include in the response. This corresponds to the "include" property in the JSON representation. + /// public IList Include { get; set; } - public bool? ParallelToolCalls { get; set; } + /// + /// Gets or sets whether multiple tool calls can be made in parallel. This corresponds to the "parallel_tool_calls" property in the JSON representation. + /// + public bool? IsParallelToolCallsEnabled { get; set; } - public bool? Store { get; set; } + /// + /// Gets or sets whether the response should be stored for later retrieval. This corresponds to the "store" property in the JSON representation. + /// + public bool? IsStoredOutputEnabled { get; set; } - public bool? Stream { get; set; } + /// + /// Gets or sets whether the response should be streamed. This corresponds to the "stream" property in the JSON representation. + /// + public bool? 
IsStreamingEnabled { get; set; } - internal static CreateResponseOptions Create(IEnumerable inputItems, ResponseClient client, ResponseCreationOptions options = null, bool isStreaming = false) + internal static CreateResponseOptions Create(IEnumerable inputItems, ResponsesClient client, ResponseCreationOptions options = null, bool isStreaming = false) { Argument.AssertNotNull(inputItems, nameof(inputItems)); options ??= new(); diff --git a/src/Custom/Responses/ResponseResult.Serialization.cs b/src/Custom/Responses/ResponseResult.Serialization.cs index 7119e2714..0b2dfb09d 100644 --- a/src/Custom/Responses/ResponseResult.Serialization.cs +++ b/src/Custom/Responses/ResponseResult.Serialization.cs @@ -88,10 +88,10 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit { writer.WriteNull("top_p"u8); } - if (Optional.IsDefined(User) && !Patch.Contains("$.user"u8)) + if (Optional.IsDefined(EndUserId) && !Patch.Contains("$.user"u8)) { writer.WritePropertyName("user"u8); - writer.WriteStringValue(User); + writer.WriteStringValue(EndUserId); } else { @@ -112,30 +112,30 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit writer.WritePropertyName("model"u8); writer.WriteStringValue(InternalModel.Value.ToString()); } - if (Optional.IsDefined(Reasoning) && !Patch.Contains("$.reasoning"u8)) + if (Optional.IsDefined(ReasoningOptions) && !Patch.Contains("$.reasoning"u8)) { writer.WritePropertyName("reasoning"u8); - writer.WriteObjectValue(Reasoning, options); + writer.WriteObjectValue(ReasoningOptions, options); } - if (Optional.IsDefined(Background) && !Patch.Contains("$.background"u8)) + if (Optional.IsDefined(IsBackgroundModeEnabled) && !Patch.Contains("$.background"u8)) { writer.WritePropertyName("background"u8); - writer.WriteBooleanValue(Background.Value); + writer.WriteBooleanValue(IsBackgroundModeEnabled.Value); } - if (Optional.IsDefined(MaxOutputTokens) && !Patch.Contains("$.max_output_tokens"u8)) + if 
(Optional.IsDefined(MaxOutputTokenCount) && !Patch.Contains("$.max_output_tokens"u8)) { writer.WritePropertyName("max_output_tokens"u8); - writer.WriteNumberValue(MaxOutputTokens.Value); + writer.WriteNumberValue(MaxOutputTokenCount.Value); } if (Optional.IsDefined(Instructions) && !Patch.Contains("$.instructions"u8)) { writer.WritePropertyName("instructions"u8); writer.WriteStringValue(Instructions); } - if (Optional.IsDefined(Text) && !Patch.Contains("$.text"u8)) + if (Optional.IsDefined(TextOptions) && !Patch.Contains("$.text"u8)) { writer.WritePropertyName("text"u8); - writer.WriteObjectValue(Text, options); + writer.WriteObjectValue(TextOptions, options); } if (Patch.Contains("$.tools"u8)) { @@ -165,10 +165,10 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit writer.WritePropertyName("tool_choice"u8); writer.WriteObjectValue(ToolChoice, options); } - if (Optional.IsDefined(Truncation) && !Patch.Contains("$.truncation"u8)) + if (Optional.IsDefined(TruncationMode) && !Patch.Contains("$.truncation"u8)) { writer.WritePropertyName("truncation"u8); - writer.WriteStringValue(Truncation.Value.ToString()); + writer.WriteStringValue(TruncationMode.Value.ToString()); } if (!Patch.Contains("$.id"u8)) { @@ -199,10 +199,10 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit { writer.WriteNull("error"u8); } - if (Optional.IsDefined(IncompleteDetails) && !Patch.Contains("$.incomplete_details"u8)) + if (Optional.IsDefined(IncompleteStatusDetails) && !Patch.Contains("$.incomplete_details"u8)) { writer.WritePropertyName("incomplete_details"u8); - writer.WriteObjectValue(IncompleteDetails, options); + writer.WriteObjectValue(IncompleteStatusDetails, options); } else { @@ -220,13 +220,13 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit { writer.WritePropertyName("output"u8); writer.WriteStartArray(); - for (int i = 0; i < Output.Count; i++) + for (int i = 0; i < OutputItems.Count; 
i++) { - if (Output[i].Patch.IsRemoved("$"u8)) + if (OutputItems[i].Patch.IsRemoved("$"u8)) { continue; } - writer.WriteObjectValue(Output[i], options); + writer.WriteObjectValue(OutputItems[i], options); } Patch.WriteTo(writer, "$.output"u8); writer.WriteEndArray(); @@ -244,7 +244,7 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit if (!Patch.Contains("$.parallel_tool_calls"u8)) { writer.WritePropertyName("parallel_tool_calls"u8); - writer.WriteBooleanValue(ParallelToolCalls); + writer.WriteBooleanValue(IsParallelToolCallsEnabled); } Patch.WriteTo(writer); @@ -626,11 +626,11 @@ private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValu if (local.StartsWith("reasoning"u8)) { - return Reasoning.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); + return ReasoningOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); } if (local.StartsWith("text"u8)) { - return Text.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("text"u8.Length)], out value); + return TextOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("text"u8.Length)], out value); } if (local.StartsWith("error"u8)) { @@ -638,7 +638,7 @@ private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValu } if (local.StartsWith("incomplete_details"u8)) { - return IncompleteDetails.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("incomplete_details"u8.Length)], out value); + return IncompleteStatusDetails.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("incomplete_details"u8.Length)], out value); } if (local.StartsWith("usage"u8)) { @@ -662,7 +662,7 @@ private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValu { return false; } - return Output[index].Patch.TryGetEncodedValue([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], out value); + return OutputItems[index].Patch.TryGetEncodedValue([.. "$"u8, .. 
currentSlice.Slice(bytesConsumed)], out value); } return false; } @@ -675,12 +675,12 @@ private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue va if (local.StartsWith("reasoning"u8)) { - Reasoning.Patch.Set([.. "$"u8, .. local.Slice("reasoning"u8.Length)], value); + ReasoningOptions.Patch.Set([.. "$"u8, .. local.Slice("reasoning"u8.Length)], value); return true; } if (local.StartsWith("text"u8)) { - Text.Patch.Set([.. "$"u8, .. local.Slice("text"u8.Length)], value); + TextOptions.Patch.Set([.. "$"u8, .. local.Slice("text"u8.Length)], value); return true; } if (local.StartsWith("error"u8)) @@ -690,7 +690,7 @@ private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue va } if (local.StartsWith("incomplete_details"u8)) { - IncompleteDetails.Patch.Set([.. "$"u8, .. local.Slice("incomplete_details"u8.Length)], value); + IncompleteStatusDetails.Patch.Set([.. "$"u8, .. local.Slice("incomplete_details"u8.Length)], value); return true; } if (local.StartsWith("usage"u8)) @@ -717,7 +717,7 @@ private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue va { return false; } - Output[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); + OutputItems[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); return true; } return false; diff --git a/src/Custom/Responses/ResponseResult.cs b/src/Custom/Responses/ResponseResult.cs index 45adcdfdc..347a414bc 100644 --- a/src/Custom/Responses/ResponseResult.cs +++ b/src/Custom/Responses/ResponseResult.cs @@ -19,14 +19,14 @@ internal ResponseResult(IDictionary metadata, float? temperature Metadata = metadata ?? 
new ChangeTrackingDictionary(); Temperature = temperature; TopP = topP; - User = user; + EndUserId = user; Tools = new ChangeTrackingList(); Id = id; CreatedAt = createdAt; Error = error; - IncompleteDetails = incompleteDetails; - Output = output.ToList(); - ParallelToolCalls = parallelToolCalls; + IncompleteStatusDetails = incompleteDetails; + OutputItems = output.ToList(); + IsParallelToolCallsEnabled = parallelToolCalls; } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. @@ -36,28 +36,28 @@ internal ResponseResult(IDictionary metadata, float? temperature Metadata = metadata ?? new ChangeTrackingDictionary(); Temperature = temperature; TopP = topP; - User = user; + EndUserId = user; ServiceTier = serviceTier; PreviousResponseId = previousResponseId; InternalModel = model; - Reasoning = reasoning; - Background = background; - MaxOutputTokens = maxOutputTokens; + ReasoningOptions = reasoning; + IsBackgroundModeEnabled = background; + MaxOutputTokenCount = maxOutputTokens; Instructions = instructions; - Text = text; + TextOptions = text; Tools = tools ?? new ChangeTrackingList(); ToolChoice = toolChoice; - Truncation = truncation; + TruncationMode = truncation; Id = id; Object = @object; Status = status; CreatedAt = createdAt; Error = error; - IncompleteDetails = incompleteDetails; - Output = output ?? new ChangeTrackingList(); + IncompleteStatusDetails = incompleteDetails; + OutputItems = output ?? new ChangeTrackingList(); OutputText = outputText; Usage = usage; - ParallelToolCalls = parallelToolCalls; + IsParallelToolCallsEnabled = parallelToolCalls; _patch = patch; _patch.SetPropagators(PropagateSet, PropagateGet); } @@ -67,61 +67,139 @@ internal ResponseResult(IDictionary metadata, float? temperature [Experimental("SCME0001")] public ref JsonPatch Patch => ref _patch; + /// + /// Gets a dictionary of custom metadata for the response. 
This corresponds to the "metadata" property in the JSON representation. + /// public IDictionary Metadata { get; } + /// + /// Gets the sampling temperature that was used, between 0 and 2. This corresponds to the "temperature" property in the JSON representation. + /// public float? Temperature { get; } + /// + /// Gets the nucleus sampling parameter that was used, between 0 and 1. This corresponds to the "top_p" property in the JSON representation. + /// public float? TopP { get; } - public string User { get; } + /// + /// Gets the unique identifier representing the end-user. This corresponds to the "user" property in the JSON representation. + /// + public string EndUserId { get; } + /// + /// Gets the service tier that was used for processing the request. This corresponds to the "service_tier" property in the JSON representation. + /// public ResponseServiceTier? ServiceTier { get; } + /// + /// Gets the ID of the previous response that was continued from, if applicable. This corresponds to the "previous_response_id" property in the JSON representation. + /// public string PreviousResponseId { get; } + /// + /// Gets the internal model identifier that was used for generating the response. + /// public ModelIdsResponses? InternalModel { get; } + /// + /// Gets the model name that was used for generating the response. This corresponds to the "model" property in the JSON representation. + /// public string Model => InternalModel?.ToString(); - public ResponseReasoningOptions Reasoning { get; } + /// + /// Gets the reasoning options that were used for the response. This corresponds to the "reasoning" property in the JSON representation. + /// + public ResponseReasoningOptions ReasoningOptions { get; } - public bool? Background { get; } + /// + /// Gets whether the response was run in background mode. This corresponds to the "background" property in the JSON representation. + /// + public bool? IsBackgroundModeEnabled { get; } - public int? 
MaxOutputTokens { get; } + /// + /// Gets the maximum number of output tokens that were configured. This corresponds to the "max_output_tokens" property in the JSON representation. + /// + public int? MaxOutputTokenCount { get; } + /// + /// Gets the instructions that were used to guide the response generation. This corresponds to the "instructions" property in the JSON representation. + /// public string Instructions { get; } - public ResponseTextOptions Text { get; } + /// + /// Gets the text format options that were used for the response. This corresponds to the "text" property in the JSON representation. + /// + public ResponseTextOptions TextOptions { get; } + /// + /// Gets a list of tools that were available for the response. This corresponds to the "tools" property in the JSON representation. + /// public IList Tools { get; } + /// + /// Gets how tool calls were selected during response generation. This corresponds to the "tool_choice" property in the JSON representation. + /// public ResponseToolChoice ToolChoice { get; } - public ResponseTruncationMode? Truncation { get; } + /// + /// Gets the truncation mode that was used for the response. This corresponds to the "truncation" property in the JSON representation. + /// + public ResponseTruncationMode? TruncationMode { get; } + /// + /// Gets the unique identifier for the response. This corresponds to the "id" property in the JSON representation. + /// public string Id { get; } + /// + /// Gets the object type identifier for the response. This corresponds to the "object" property in the JSON representation. + /// public string Object { get; } = "ResponseResult"; + /// + /// Gets the status of the response processing. This corresponds to the "status" property in the JSON representation. + /// public ResponseStatus? Status { get; } + /// + /// Gets the timestamp when the response was created. This corresponds to the "created_at" property in the JSON representation. 
+ /// public DateTimeOffset CreatedAt { get; } + /// + /// Gets the error information if the response failed. This corresponds to the "error" property in the JSON representation. + /// public ResponseError Error { get; } - public ResponseIncompleteStatusDetails IncompleteDetails { get; } + /// + /// Gets the details about incomplete status if applicable. This corresponds to the "incomplete_details" property in the JSON representation. + /// + public ResponseIncompleteStatusDetails IncompleteStatusDetails { get; } - public IList Output { get; } + /// + /// Gets the output items generated by the response. This corresponds to the "output" property in the JSON representation. + /// + public IList OutputItems { get; } - public string OutputText { get; } + /// + /// Gets the concatenated text output from the response, if any. + /// + internal string OutputText { get; } + /// + /// Gets the token usage statistics for the response. This corresponds to the "usage" property in the JSON representation. + /// public ResponseTokenUsage Usage { get; } - public bool ParallelToolCalls { get; } + /// + /// Gets whether multiple tool calls were made in parallel. This corresponds to the "parallel_tool_calls" property in the JSON representation. 
+ /// + public bool IsParallelToolCallsEnabled { get; } public string GetOutputText() { - IEnumerable outputTextSegments = Output.Where(item => item is InternalResponsesAssistantMessage) + IEnumerable outputTextSegments = OutputItems.Where(item => item is InternalResponsesAssistantMessage) .Select(item => item as InternalResponsesAssistantMessage) .SelectMany(message => message.Content.Where(contentPart => contentPart.Kind == ResponseContentPartKind.OutputText) .Select(outputTextPart => outputTextPart.Text)); diff --git a/src/Custom/Responses/OpenAIResponseClient.Protocol.cs b/src/Custom/Responses/ResponsesClient.Protocol.cs similarity index 98% rename from src/Custom/Responses/OpenAIResponseClient.Protocol.cs rename to src/Custom/Responses/ResponsesClient.Protocol.cs index ddd1a9499..b1439a24e 100644 --- a/src/Custom/Responses/OpenAIResponseClient.Protocol.cs +++ b/src/Custom/Responses/ResponsesClient.Protocol.cs @@ -11,7 +11,7 @@ namespace OpenAI.Responses; [CodeGenSuppress("CancelResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] [CodeGenSuppress("GetResponse", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] [CodeGenSuppress("GetResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] -public partial class ResponseClient +public partial class ResponsesClient { public virtual async Task GetResponseAsync(string responseId, bool? stream, int? 
startingAfter, RequestOptions options) { diff --git a/src/Custom/Responses/OpenAIResponseClient.cs b/src/Custom/Responses/ResponsesClient.cs similarity index 96% rename from src/Custom/Responses/OpenAIResponseClient.cs rename to src/Custom/Responses/ResponsesClient.cs index b49fccd67..1c563cf94 100644 --- a/src/Custom/Responses/OpenAIResponseClient.cs +++ b/src/Custom/Responses/ResponsesClient.cs @@ -23,17 +23,17 @@ namespace OpenAI.Responses; [CodeGenSuppress("CancelResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(CancellationToken))] [CodeGenSuppress("GetResponse", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(CancellationToken))] [CodeGenSuppress("GetResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(CancellationToken))] -public partial class ResponseClient +public partial class ResponsesClient { private readonly string _model; // CUSTOM: Added as a convenience. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The API key to authenticate with the service. /// or is null. /// is an empty string, and was expected to be non-empty. - public ResponseClient(string model, string apiKey) : this(model, new ApiKeyCredential(apiKey), new OpenAIClientOptions()) + public ResponsesClient(string model, string apiKey) : this(model, new ApiKeyCredential(apiKey), new OpenAIClientOptions()) { } @@ -41,12 +41,12 @@ public partial class ResponseClient // - Added `model` parameter. // - Used a custom pipeline. // - Demoted the endpoint parameter to be a property in the options class. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The to authenticate with the service. /// or is null. 
/// is an empty string, and was expected to be non-empty. - public ResponseClient(string model, ApiKeyCredential credential) : this(model, credential, new OpenAIClientOptions()) + public ResponsesClient(string model, ApiKeyCredential credential) : this(model, credential, new OpenAIClientOptions()) { } @@ -54,34 +54,34 @@ public partial class ResponseClient // - Added `model` parameter. // - Used a custom pipeline. // - Demoted the endpoint parameter to be a property in the options class. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The to authenticate with the service. /// The options to configure the client. /// or is null. /// is an empty string, and was expected to be non-empty. - public ResponseClient(string model, ApiKeyCredential credential, OpenAIClientOptions options) : this(model, OpenAIClient.CreateApiKeyAuthenticationPolicy(credential), options) + public ResponsesClient(string model, ApiKeyCredential credential, OpenAIClientOptions options) : this(model, OpenAIClient.CreateApiKeyAuthenticationPolicy(credential), options) { } // CUSTOM: Added as a convenience. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The authentication policy used to authenticate with the service. /// or is null. /// is an empty string, and was expected to be non-empty. - public ResponseClient(string model, AuthenticationPolicy authenticationPolicy) : this(model, authenticationPolicy, new OpenAIClientOptions()) + public ResponsesClient(string model, AuthenticationPolicy authenticationPolicy) : this(model, authenticationPolicy, new OpenAIClientOptions()) { } // CUSTOM: Added as a convenience. - /// Initializes a new instance of . + /// Initializes a new instance of . 
/// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The authentication policy used to authenticate with the service. /// The options to configure the client. /// or is null. /// is an empty string, and was expected to be non-empty. - public ResponseClient(string model, AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options) + public ResponsesClient(string model, AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options) { Argument.AssertNotNullOrEmpty(model, nameof(model)); Argument.AssertNotNull(authenticationPolicy, nameof(authenticationPolicy)); @@ -97,13 +97,13 @@ public ResponseClient(string model, AuthenticationPolicy authenticationPolicy, O // - Used a custom pipeline. // - Demoted the endpoint parameter to be a property in the options class. // - Made protected. - /// Initializes a new instance of . + /// Initializes a new instance of . /// The HTTP pipeline to send and receive REST requests and responses. /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The options to configure the client. /// or is null. /// is an empty string, and was expected to be non-empty. 
- protected internal ResponseClient(ClientPipeline pipeline, string model, OpenAIClientOptions options) + protected internal ResponsesClient(ClientPipeline pipeline, string model, OpenAIClientOptions options) { Argument.AssertNotNull(pipeline, nameof(pipeline)); options ??= new OpenAIClientOptions(); @@ -461,7 +461,7 @@ internal virtual CreateResponseOptions CreatePerCallOptions(CreateResponseOption if (stream) { - copiedOptions.Stream = true; + copiedOptions.IsStreamingEnabled = true; } return copiedOptions; diff --git a/src/Generated/OpenAIClient.cs b/src/Generated/OpenAIClient.cs index 39ed5d254..29f3b1ee1 100644 --- a/src/Generated/OpenAIClient.cs +++ b/src/Generated/OpenAIClient.cs @@ -17,7 +17,7 @@ public partial class OpenAIClient private readonly ApiKeyCredential _keyCredential; private const string AuthorizationHeader = "Authorization"; private const string AuthorizationApiKeyPrefix = "Bearer"; - private ResponseClient _cachedResponseClient; + private ResponsesClient _cachedResponseClient; protected OpenAIClient() { @@ -26,9 +26,9 @@ protected OpenAIClient() public ClientPipeline Pipeline { get; } [Experimental("OPENAI001")] - public virtual ResponseClient GetResponseClient() + public virtual ResponsesClient GetResponseClient() { - return Volatile.Read(ref _cachedResponseClient) ?? Interlocked.CompareExchange(ref _cachedResponseClient, new ResponseClient(Pipeline, _endpoint), null) ?? _cachedResponseClient; + return Volatile.Read(ref _cachedResponseClient) ?? Interlocked.CompareExchange(ref _cachedResponseClient, new ResponsesClient(Pipeline, _endpoint), null) ?? 
_cachedResponseClient; } } } diff --git a/src/Generated/ResponseClient.RestClient.cs b/src/Generated/ResponseClient.RestClient.cs index 0aed8c460..a897c1095 100644 --- a/src/Generated/ResponseClient.RestClient.cs +++ b/src/Generated/ResponseClient.RestClient.cs @@ -8,7 +8,7 @@ namespace OpenAI.Responses { - public partial class ResponseClient + public partial class ResponsesClient { private static PipelineMessageClassifier _pipelineMessageClassifier200; diff --git a/src/Generated/ResponseClient.cs b/src/Generated/ResponseClient.cs index 85247c3ea..9f9489138 100644 --- a/src/Generated/ResponseClient.cs +++ b/src/Generated/ResponseClient.cs @@ -13,15 +13,15 @@ namespace OpenAI.Responses { [Experimental("OPENAI001")] - public partial class ResponseClient + public partial class ResponsesClient { private readonly Uri _endpoint; - protected ResponseClient() + protected ResponsesClient() { } - internal ResponseClient(ClientPipeline pipeline, Uri endpoint) + internal ResponsesClient(ClientPipeline pipeline, Uri endpoint) { _endpoint = endpoint; Pipeline = pipeline; diff --git a/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs b/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs index c96580874..91d8196a5 100644 --- a/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs +++ b/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs @@ -12,7 +12,7 @@ namespace OpenAI.Responses { internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResult : AsyncCollectionResult { - private readonly ResponseClient _client; + private readonly ResponsesClient _client; private readonly string _responseId; private readonly int? 
_limit; private readonly string _order; @@ -20,7 +20,7 @@ internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResult private readonly string _before; private readonly RequestOptions _options; - public ResponseClientGetResponseInputItemsAsyncCollectionResult(ResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponseClientGetResponseInputItemsAsyncCollectionResult(ResponsesClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs b/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs index 09f90e9ec..c07dadeee 100644 --- a/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs +++ b/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs @@ -13,7 +13,7 @@ namespace OpenAI.Responses { internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResultOfT : AsyncCollectionResult { - private readonly ResponseClient _client; + private readonly ResponsesClient _client; private readonly string _responseId; private readonly int? _limit; private readonly string _order; @@ -21,7 +21,7 @@ internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResultO private readonly string _before; private readonly RequestOptions _options; - public ResponseClientGetResponseInputItemsAsyncCollectionResultOfT(ResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponseClientGetResponseInputItemsAsyncCollectionResultOfT(ResponsesClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs b/src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs index 95e5a6f33..2ab6961d1 100644 --- a/src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs +++ b/src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs @@ -12,7 +12,7 @@ namespace OpenAI.Responses { internal partial class ResponseClientGetResponseInputItemsCollectionResult : CollectionResult { - private readonly ResponseClient _client; + private readonly ResponsesClient _client; private readonly string _responseId; private readonly int? _limit; private readonly string _order; @@ -20,7 +20,7 @@ internal partial class ResponseClientGetResponseInputItemsCollectionResult : Col private readonly string _before; private readonly RequestOptions _options; - public ResponseClientGetResponseInputItemsCollectionResult(ResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponseClientGetResponseInputItemsCollectionResult(ResponsesClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs b/src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs index 5a00d7c2b..562f740fc 100644 --- a/src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs +++ b/src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs @@ -12,7 +12,7 @@ namespace OpenAI.Responses { internal partial class ResponseClientGetResponseInputItemsCollectionResultOfT : CollectionResult { - private readonly ResponseClient _client; + private readonly ResponsesClient _client; private readonly string _responseId; private readonly int? _limit; private readonly string _order; @@ -20,7 +20,7 @@ internal partial class ResponseClientGetResponseInputItemsCollectionResultOfT : private readonly string _before; private readonly RequestOptions _options; - public ResponseClientGetResponseInputItemsCollectionResultOfT(ResponseClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponseClientGetResponseInputItemsCollectionResultOfT(ResponsesClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/tests/Responses/ResponseStoreTests.cs b/tests/Responses/ResponseStoreTests.cs index 42fedcc98..6f3cb5240 100644 --- a/tests/Responses/ResponseStoreTests.cs +++ b/tests/Responses/ResponseStoreTests.cs @@ -23,7 +23,7 @@ public ResponseStoreTests(bool isAsync) : base(isAsync) [RecordedTest] public async Task GetInputItemsWithPagination() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); // Create a response with multiple input items List inputItems = new() @@ -60,7 +60,7 @@ public async Task GetInputItemsWithPagination() [RecordedTest] public async Task GetInputItemsWithMultiPartPagination() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); string filePath = Path.Join("Assets", "files_travis_favorite_food.pdf"); @@ -112,7 +112,7 @@ public async Task GetInputItemsWithMultiPartPagination() [RecordedTest] public async Task GetInputItemsWithAfterIdPagination() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); // Ensure multiple input items exist to paginate List inputItems = new() @@ -153,7 +153,7 @@ public async Task GetInputItemsWithAfterIdPagination() [RecordedTest] public async Task GetInputItemsWithOrderFiltering() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); // Create inputs in a defined sequence List inputItems = new() @@ -202,7 +202,7 @@ public async Task GetInputItemsWithOrderFiltering() [RecordedTest] public async Task GetInputItemsHandlesLargeLimits() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -227,7 +227,7 @@ public async Task GetInputItemsHandlesLargeLimits() [RecordedTest] public async Task GetInputItemsWithMinimalLimits() { - 
ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -252,7 +252,7 @@ public async Task GetInputItemsWithMinimalLimits() [RecordedTest] public async Task GetInputItemsWithCancellationToken() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -288,7 +288,7 @@ public async Task GetInputItemsWithCancellationToken() [RecordedTest] public async Task GetInputItemsWithCombinedOptions() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -316,5 +316,5 @@ public async Task GetInputItemsWithCombinedOptions() Assert.That(items, Has.Count.GreaterThan(0)); } - private ResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); + private ResponsesClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); } \ No newline at end of file diff --git a/tests/Responses/ResponsesTests.cs b/tests/Responses/ResponsesTests.cs index 80bd2719f..b4d70d5d5 100644 --- a/tests/Responses/ResponsesTests.cs +++ b/tests/Responses/ResponsesTests.cs @@ -75,7 +75,7 @@ private void Validate(T input) where T : class [RecordedTest] public async Task ComputerToolWithScreenshotRoundTrip() { - ResponseClient client = GetTestClient("computer-use-preview-2025-03-11"); + ResponsesClient client = GetTestClient("computer-use-preview-2025-03-11"); ResponseTool computerTool = ResponseTool.CreateComputerTool(ComputerToolEnvironment.Windows, 1024, 768); CreateResponseOptions responseOptions = new( [ @@ -84,14 +84,14 @@ public async Task ComputerToolWithScreenshotRoundTrip() ]) { Tools = { computerTool }, - Truncation = ResponseTruncationMode.Auto, + TruncationMode = 
ResponseTruncationMode.Auto, }; ResponseResult response = await client.CreateResponseAsync(responseOptions); while (true) { - Assert.That(response.Output.Count, Is.GreaterThan(0)); - ResponseItem outputItem = response.Output?.LastOrDefault(); + Assert.That(response.OutputItems.Count, Is.GreaterThan(0)); + ResponseItem outputItem = response.OutputItems?.LastOrDefault(); if (outputItem is ComputerCallResponseItem computerCall) { if (computerCall.Action.Kind == ComputerCallActionKind.Screenshot) @@ -138,7 +138,7 @@ public async Task ComputerToolWithScreenshotRoundTrip() [RecordedTest] public async Task WebSearchCall() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Searching the internet, what's the weather like in Seattle?")]) { @@ -149,11 +149,11 @@ public async Task WebSearchCall() ToolChoice = ResponseToolChoice.CreateWebSearchChoice() }); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -165,7 +165,7 @@ public async Task WebSearchCall() [RecordedTest] public async Task WebSearchCallPreview() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new 
CreateResponseOptions([ResponseItem.CreateUserMessageItem("What was a positive news story from today?")]) { @@ -176,11 +176,11 @@ public async Task WebSearchCallPreview() ToolChoice = ResponseToolChoice.CreateWebSearchChoice() }); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -192,7 +192,7 @@ public async Task WebSearchCallPreview() [RecordedTest] public async Task WebSearchCallStreaming() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); const string message = "Searching the internet, what's the weather like in San Francisco?"; @@ -260,7 +260,7 @@ in client.CreateResponseStreamingAsync(responseOptions)) [RecordedTest] public async Task ResponseWithImageGenTool() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { @@ -280,17 +280,17 @@ public async Task ResponseWithImageGenTool() ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], 
Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -298,7 +298,7 @@ public async Task ResponseWithImageGenTool() [RecordedTest] public async Task ImageGenToolStreaming() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); const string message = "Draw a gorgeous image of a river made of white owl feathers, snaking its way through a serene winter landscape"; @@ -381,7 +381,7 @@ in client.CreateResponseStreamingAsync(responseOptions)) [RecordedTest] public async Task ImageGenToolInputMaskWithImageBytes() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); string imageFilename = "images_dog_and_cat.png"; string imagePath = Path.Combine("Assets", imageFilename); @@ -399,17 +399,17 @@ public async Task ImageGenToolInputMaskWithImageBytes() ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - 
MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -417,7 +417,7 @@ public async Task ImageGenToolInputMaskWithImageBytes() [RecordedTest] public async Task ImageGenToolInputMaskWithImageUri() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { @@ -433,17 +433,17 @@ public async Task ImageGenToolInputMaskWithImageUri() ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem 
imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -451,7 +451,7 @@ public async Task ImageGenToolInputMaskWithImageUri() [RecordedTest] public async Task ImageGenToolInputMaskWithFileId() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); @@ -484,17 +484,17 @@ public async Task ImageGenToolInputMaskWithFileId() ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -502,7 +502,7 @@ public async Task ImageGenToolInputMaskWithFileId() [RecordedTest] public async Task 
StreamingResponses() { - ResponseClient client = GetTestClient("gpt-4o-mini"); // "computer-use-alpha"); + ResponsesClient client = GetTestClient("gpt-4o-mini"); // "computer-use-alpha"); List inputItems = [ResponseItem.CreateUserMessageItem("Hello, world!")]; List deltaTextSegments = []; @@ -517,7 +517,7 @@ public async Task StreamingResponses() } else if (update is StreamingResponseCompletedUpdate responseCompletedUpdate) { - finalResponseText = responseCompletedUpdate.Response.Output[0] is MessageResponseItem messageItem + finalResponseText = responseCompletedUpdate.Response.OutputItems[0] is MessageResponseItem messageItem ? messageItem.Content[0].Text : null; } @@ -530,12 +530,12 @@ public async Task StreamingResponses() [RecordedTest] public async Task StreamingResponsesWithReasoningSummary() { - ResponseClient client = GetTestClient("o3-mini"); + ResponsesClient client = GetTestClient("o3-mini"); List inputItems = [ResponseItem.CreateUserMessageItem("I’m visiting New York for 3 days and love food and art. 
What’s the best way to plan my trip?")]; CreateResponseOptions options = new(inputItems) { - Reasoning = new() + ReasoningOptions = new() { ReasoningSummaryVerbosity = ResponseReasoningSummaryVerbosity.Auto, ReasoningEffortLevel = ResponseReasoningEffortLevel.High, @@ -592,7 +592,7 @@ public async Task StreamingResponsesWithReasoningSummary() [TestCase("computer-use-preview")] public async Task ResponsesHelloWorldWithTool(string model) { - ResponseClient client = GetTestClient(model); + ResponsesClient client = GetTestClient(model); CreateResponseOptions options = new( [ @@ -620,7 +620,7 @@ public async Task ResponsesHelloWorldWithTool(string model) """), strictModeEnabled: false), }, - Truncation = ResponseTruncationMode.Auto, + TruncationMode = ResponseTruncationMode.Auto, }; ResponseResult response = await client.CreateResponseAsync( @@ -633,17 +633,17 @@ public async Task ResponsesHelloWorldWithTool(string model) Assert.That(response.PreviousResponseId, Is.Null); // Observed: input may not exist on normal responses // Assert.That(response.Input.Count, Is.EqualTo(1)); - Assert.That(response.Output.Count, Is.EqualTo(1)); + Assert.That(response.OutputItems.Count, Is.EqualTo(1)); } [RecordedTest] public async Task ResponsesWithReasoning() { - ResponseClient client = GetTestClient("o3-mini"); + ResponsesClient client = GetTestClient("o3-mini"); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What's the best way to fold a burrito?")]) { - Reasoning = new() + ReasoningOptions = new() { ReasoningSummaryVerbosity = ResponseReasoningSummaryVerbosity.Detailed, ReasoningEffortLevel = ResponseReasoningEffortLevel.Low, @@ -659,17 +659,17 @@ public async Task ResponsesWithReasoning() Assert.That(response, Is.Not.Null); Assert.That(response.Id, Is.Not.Null); Assert.That(response.CreatedAt, Is.GreaterThan(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero))); - Assert.That(response.Truncation, Is.EqualTo(ResponseTruncationMode.Disabled)); - 
Assert.That(response.MaxOutputTokens, Is.Null); + Assert.That(response.TruncationMode, Is.EqualTo(ResponseTruncationMode.Disabled)); + Assert.That(response.MaxOutputTokenCount, Is.Null); Assert.That(response.Model, Does.StartWith("o3-mini")); Assert.That(response.Usage, Is.Not.Null); Assert.That(response.Usage.OutputTokenDetails, Is.Not.Null); Assert.That(response.Usage.OutputTokenDetails.ReasoningTokenCount, Is.GreaterThan(0)); Assert.That(response.Metadata, Is.Not.Null.Or.Empty); Assert.That(response.Metadata["superfluous_key"], Is.EqualTo("superfluous_value")); - Assert.That(response.Output, Has.Count.EqualTo(2)); - ReasoningResponseItem reasoningItem = response.Output[0] as ReasoningResponseItem; - MessageResponseItem messageItem = response.Output[1] as MessageResponseItem; + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + ReasoningResponseItem reasoningItem = response.OutputItems[0] as ReasoningResponseItem; + MessageResponseItem messageItem = response.OutputItems[1] as MessageResponseItem; Assert.That(reasoningItem.SummaryParts, Has.Count.GreaterThan(0)); Assert.That(reasoningItem.GetSummaryText(), Is.Not.Null.And.Not.Empty); Assert.That(reasoningItem.Id, Is.Not.Null.And.Not.Empty); @@ -681,7 +681,7 @@ public async Task ResponsesWithReasoning() [TestCase("gpt-4o-mini")] public async Task HelloWorldStreaming(string model) { - ResponseClient client = GetTestClient(model); + ResponsesClient client = GetTestClient(model); ResponseContentPart contentPart = ResponseContentPart.CreateInputTextPart("Hello, responses!"); @@ -691,7 +691,7 @@ ResponseContentPart contentPart in client.CreateResponseStreamingAsync( new ([inputItem]) { - Truncation = ResponseTruncationMode.Auto, + TruncationMode = ResponseTruncationMode.Auto, })) { Console.WriteLine(ModelReaderWriter.Write(update)); @@ -701,7 +701,7 @@ in client.CreateResponseStreamingAsync( [RecordedTest] public async Task CanDeleteResponse() { - ResponseClient client = GetTestClient(); + ResponsesClient client 
= GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new([ResponseItem.CreateUserMessageItem("Hello, model!")])); @@ -722,12 +722,12 @@ async Task RetrieveThatResponseAsync() [RecordedTest] public async Task CanOptOutOfStorage() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new ([ResponseItem.CreateUserMessageItem("Hello, model!")]) { - Store = false, + IsStoredOutputEnabled = false, }); ClientResultException expectedException = Assert.ThrowsAsync(async () => await client.GetResponseAsync(new(response.Id))); @@ -737,7 +737,7 @@ public async Task CanOptOutOfStorage() [RecordedTest] public async Task ResponseServiceTierWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); MessageResponseItem message = ResponseItem.CreateUserMessageItem("Using a comprehensive evaluation of popular media in the 1970s and 1980s, what were the most common sci-fi themes?"); CreateResponseOptions options = new([message]) @@ -753,14 +753,14 @@ public async Task ResponseServiceTierWorks() [RecordedTest] public async Task OutputTextMethod() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new([ResponseItem.CreateUserMessageItem("Respond with only the word hello.")])); var outputText = response.GetOutputText(); Assert.That(outputText.Length, Is.GreaterThan(0).And.LessThan(7)); Assert.That(outputText.ToLower(), Does.Contain("hello")); - response.Output.Add(ResponseItem.CreateAssistantMessageItem("More text!")); + response.OutputItems.Add(ResponseItem.CreateAssistantMessageItem("More text!")); Assert.That(response.GetOutputText().ToLower(), Does.EndWith("more text!")); response = await client.CreateResponseAsync( @@ -776,13 +776,13 @@ public async Task OutputTextMethod() }, ToolChoice = ResponseToolChoice.CreateRequiredChoice(), 
}); - Assert.That(response.OutputText, Is.Null); + Assert.That(response.GetOutputText(), Is.Null); } [RecordedTest] public async Task MessageHistoryWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(new( [ @@ -799,7 +799,7 @@ public async Task MessageHistoryWorks() [RecordedTest] public async Task ImageInputWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); string imagePath = Path.Join("Assets", "images_dog_and_cat.png"); BinaryData imageBytes = BinaryData.FromBytes(await File.ReadAllBytesAsync(imagePath)); @@ -813,14 +813,14 @@ public async Task ImageInputWorks() ]), ])); - Console.WriteLine(response.OutputText); - Assert.That(response.OutputText.ToLowerInvariant(), Does.Contain("dog").Or.Contain("cat").IgnoreCase); + Console.WriteLine(response.GetOutputText()); + Assert.That(response.GetOutputText().ToLowerInvariant(), Does.Contain("dog").Or.Contain("cat").IgnoreCase); } [RecordedTest] public async Task FileInputFromIdWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); string filePath = Path.Join("Assets", "files_travis_favorite_food.pdf"); @@ -849,7 +849,7 @@ public async Task FileInputFromIdWorks() [RecordedTest] public async Task FileInputFromBinaryWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); string filePath = Path.Join("Assets", "files_travis_favorite_food.pdf"); Stream fileStream = File.OpenRead(filePath); @@ -895,17 +895,17 @@ public async Task AllInstructionMethodsWork(ResponsesTestInstructionMethod instr options.Instructions = instructions; } - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response, Is.Not.Null); - 
Assert.That(response.Output, Is.Not.Null.And.Not.Empty); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That((response.Output[0] as MessageResponseItem).Content, Is.Not.Null.And.Not.Empty); - Assert.That((response.Output[0] as MessageResponseItem).Content[0].Text, Does.StartWith("Arr, matey")); + Assert.That(response.OutputItems, Is.Not.Null.And.Not.Empty); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That((response.OutputItems[0] as MessageResponseItem).Content, Is.Not.Null.And.Not.Empty); + Assert.That((response.OutputItems[0] as MessageResponseItem).Content[0].Text, Does.StartWith("Arr, matey")); ResponseResult retrievedResponse = await client.GetResponseAsync(new(response.Id)); - Assert.That((retrievedResponse?.Output?.FirstOrDefault() as MessageResponseItem)?.Content?.FirstOrDefault()?.Text, Does.StartWith("Arr, matey")); + Assert.That((retrievedResponse?.OutputItems?.FirstOrDefault() as MessageResponseItem)?.Content?.FirstOrDefault()?.Text, Does.StartWith("Arr, matey")); if (instructionMethod == ResponsesTestInstructionMethod.InstructionsProperty) { @@ -939,8 +939,8 @@ public async Task AllInstructionMethodsWork(ResponsesTestInstructionMethod instr [RecordedTest] public async Task TwoTurnCrossModel() { - ResponseClient client = GetTestClient("gpt-4o-mini"); - ResponseClient client2 = GetTestClient("o3-mini"); + ResponsesClient client = GetTestClient("gpt-4o-mini"); + ResponsesClient client2 = GetTestClient("o3-mini"); ResponseResult response = await client.CreateResponseAsync(new( @@ -957,12 +957,12 @@ public async Task TwoTurnCrossModel() [TestCase("computer-use-preview", Ignore = "Not yet supported with computer-use-preview")] public async Task StructuredOutputs(string modelName) { - ResponseClient client = GetTestClient(modelName); + ResponsesClient client = GetTestClient(modelName); ResponseResult response = await client.CreateResponseAsync( new ([ResponseItem.CreateUserMessageItem("Write a JSON document with a list of 
five animals")]) { - Text = new ResponseTextOptions() + TextOptions = new ResponseTextOptions() { TextFormat = ResponseTextFormat.CreateJsonSchemaFormat( "data_list", @@ -985,10 +985,10 @@ public async Task StructuredOutputs(string modelName) }); Assert.That( - response?.Text?.TextFormat?.Kind, + response?.TextOptions?.TextFormat?.Kind, Is.EqualTo(ResponseTextFormatKind.JsonSchema)); - Assert.That(response?.Output, Has.Count.EqualTo(1)); - MessageResponseItem message = response.Output[0] as MessageResponseItem; + Assert.That(response?.OutputItems, Has.Count.EqualTo(1)); + MessageResponseItem message = response.OutputItems[0] as MessageResponseItem; Assert.That(message?.Content, Has.Count.EqualTo(1)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -1003,7 +1003,7 @@ public async Task StructuredOutputs(string modelName) [RecordedTest] public async Task FunctionCallWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) { @@ -1013,8 +1013,8 @@ public async Task FunctionCallWorks() ResponseResult response = await client.CreateResponseAsync( options); - Assert.That(response.Output, Has.Count.EqualTo(1)); - FunctionCallResponseItem functionCall = response.Output[0] as FunctionCallResponseItem; + Assert.That(response.OutputItems, Has.Count.EqualTo(1)); + FunctionCallResponseItem functionCall = response.OutputItems[0] as FunctionCallResponseItem; Assert.That(functionCall, Is.Not.Null); Assert.That(functionCall!.Id, Has.Length.GreaterThan(0)); Assert.That(functionCall.FunctionName, Is.EqualTo("get_weather_at_location")); @@ -1035,8 +1035,8 @@ public async Task FunctionCallWorks() ResponseResult turn2Response = await client.CreateResponseAsync( turn2Options); - Assert.That(turn2Response.Output?.Count, Is.EqualTo(1)); - MessageResponseItem turn2Message = turn2Response!.Output[0] as 
MessageResponseItem; + Assert.That(turn2Response.OutputItems?.Count, Is.EqualTo(1)); + MessageResponseItem turn2Message = turn2Response!.OutputItems[0] as MessageResponseItem; Assert.That(turn2Message, Is.Not.Null); Assert.That(turn2Message!.Role, Is.EqualTo(MessageRole.Assistant)); Assert.That(turn2Message.Content, Has.Count.EqualTo(1)); @@ -1049,7 +1049,7 @@ public async Task FunctionCallWorks() [RecordedTest] public async Task FunctionCallStreamingWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) { @@ -1098,18 +1098,18 @@ public async Task FunctionCallStreamingWorks() [RecordedTest] public async Task MaxTokens() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Write three haikus about tropical fruit")]) { - MaxOutputTokens = 20, + MaxOutputTokenCount = 20, }); Assert.That( - response?.IncompleteDetails?.Reason, + response?.IncompleteStatusDetails?.Reason, Is.EqualTo(ResponseIncompleteStatusReason.MaxOutputTokens)); - MessageResponseItem message = response?.Output?.FirstOrDefault() as MessageResponseItem; ; + MessageResponseItem message = response?.OutputItems?.FirstOrDefault() as MessageResponseItem; ; Assert.That(message?.Content?.FirstOrDefault(), Is.Not.Null); Assert.That(message?.Status, Is.EqualTo(MessageStatus.Incomplete)); } @@ -1117,7 +1117,7 @@ public async Task MaxTokens() [RecordedTest] public async Task FunctionToolChoiceWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseToolChoice toolChoice = ResponseToolChoice.CreateFunctionChoice(s_GetWeatherAtLocationToolName); @@ -1135,7 +1135,7 @@ ResponseToolChoice toolChoice Assert.That(response.ToolChoice.Kind, 
Is.EqualTo(ResponseToolChoiceKind.Function)); Assert.That(response.ToolChoice.FunctionName, Is.EqualTo(toolChoice.FunctionName)); - FunctionCallResponseItem functionCall = response.Output.FirstOrDefault() as FunctionCallResponseItem; + FunctionCallResponseItem functionCall = response.OutputItems.FirstOrDefault() as FunctionCallResponseItem; Assert.That(functionCall, Is.Not.Null); Assert.That(functionCall.FunctionName, Is.EqualTo(toolChoice.FunctionName)); } @@ -1144,11 +1144,11 @@ ResponseToolChoice toolChoice [RecordedTest] public async Task CanStreamBackgroundResponses() { - ResponseClient client = GetTestClient("gpt-4.1-mini"); + ResponsesClient client = GetTestClient("gpt-4.1-mini"); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")]) { - Background = true, + IsBackgroundModeEnabled = true, }; AsyncCollectionResult updates = client.CreateResponseStreamingAsync(options); @@ -1175,7 +1175,7 @@ public async Task CanStreamBackgroundResponses() Assert.That(retrievedResponse, Is.Not.Null); Assert.That(retrievedResponse.Id, Is.EqualTo(queuedResponseId)); - Assert.That(retrievedResponse.Background, Is.True); + Assert.That(retrievedResponse.IsBackgroundModeEnabled, Is.True); Assert.That(retrievedResponse.Status, Is.EqualTo(ResponseStatus.Queued)); // Now try continuing the stream. 
@@ -1199,24 +1199,24 @@ public async Task CanStreamBackgroundResponses() Assert.That(firstContinuedSequenceNumber, Is.EqualTo(lastSequenceNumber + 1)); Assert.That(completedResponse?.Id, Is.EqualTo(queuedResponseId)); - Assert.That(completedResponse?.Output?.FirstOrDefault(), Is.Not.Null); + Assert.That(completedResponse?.OutputItems?.FirstOrDefault(), Is.Not.Null); } [RecordedTest] public async Task CanCancelBackgroundResponses() { - ResponseClient client = GetTestClient("gpt-4.1-mini"); + ResponsesClient client = GetTestClient("gpt-4.1-mini"); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")]) { - Background = true, + IsBackgroundModeEnabled = true, }; ResponseResult response = await client.CreateResponseAsync(options); Assert.That(response, Is.Not.Null); Assert.That(response.Id, Is.Not.Null.And.Not.Empty); - Assert.That(response.Background, Is.True); + Assert.That(response.IsBackgroundModeEnabled, Is.True); Assert.That(response.Status, Is.EqualTo(ResponseStatus.Queued)); ResponseResult cancelledResponse = await client.CancelResponseAsync(response.Id); @@ -1248,5 +1248,5 @@ public async Task CanCancelBackgroundResponses() """), strictModeEnabled: false); - private ResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); + private ResponsesClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); } \ No newline at end of file diff --git a/tests/Responses/ResponsesToolTests.cs b/tests/Responses/ResponsesToolTests.cs index 49bbb094f..c1623f10f 100644 --- a/tests/Responses/ResponsesToolTests.cs +++ b/tests/Responses/ResponsesToolTests.cs @@ -45,13 +45,13 @@ public async Task MCPToolWorks() } }; - ResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponsesClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); - 
Assert.That(response.Output, Has.Count.GreaterThan(0)); + Assert.That(response.OutputItems, Has.Count.GreaterThan(0)); // Check tool list. - List toolDefinitionListItems = response.Output.OfType().ToList(); + List toolDefinitionListItems = response.OutputItems.OfType().ToList(); Assert.That(toolDefinitionListItems, Has.Count.EqualTo(1)); McpToolDefinitionListItem listItem = toolDefinitionListItems[0]; @@ -63,7 +63,7 @@ public async Task MCPToolWorks() Assert.That(rollToolDefinition.Annotations, Is.Not.Null); // Check tool call. - List toolCallItems = response.Output.OfType().ToList(); + List toolCallItems = response.OutputItems.OfType().ToList(); Assert.That(toolCallItems, Has.Count.EqualTo(1)); McpToolCallItem toolCallItem = toolCallItems[0]; @@ -74,7 +74,7 @@ public async Task MCPToolWorks() Assert.That(toolCallItem.Error, Is.Null); // Check assistant message. - MessageResponseItem assistantMessageItem = response.Output.Last() as MessageResponseItem; + MessageResponseItem assistantMessageItem = response.OutputItems.Last() as MessageResponseItem; Assert.That(assistantMessageItem, Is.Not.Null); } @@ -97,7 +97,7 @@ public async Task MCPToolStreamingWorks() } }; - ResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponsesClient client = GetTestClient(overrideModel: "gpt-5"); AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(options); @@ -211,15 +211,15 @@ public async Task MCPToolNeverRequiresApproval(bool useGlobalPolicy) } }; - ResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponsesClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); - Assert.That(response.Output, Has.Count.GreaterThan(0)); - Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response.OutputItems, Has.Count.GreaterThan(0)); + Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); // Confirm there are no approval 
requests and that the tool was called. - Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(0)); - Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); + Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); } [RecordedTest] @@ -252,15 +252,15 @@ public async Task MCPToolAlwaysRequiresApproval(bool useGlobalPolicy) } }; - ResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponsesClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response1 = await client.CreateResponseAsync(options); - Assert.That(response1.Output, Has.Count.GreaterThan(0)); - Assert.That(response1.Output.OfType().ToList(), Has.Count.EqualTo(1)); - Assert.That(response1.Output.OfType().ToList(), Has.Count.EqualTo(0)); + Assert.That(response1.OutputItems, Has.Count.GreaterThan(0)); + Assert.That(response1.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response1.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); // Check that it stopped at the approval request. - McpToolCallApprovalRequestItem approvalRequestItem = response1.Output.Last() as McpToolCallApprovalRequestItem; + McpToolCallApprovalRequestItem approvalRequestItem = response1.OutputItems.Last() as McpToolCallApprovalRequestItem; Assert.That(approvalRequestItem, Is.Not.Null); // Prepare the response. 
@@ -270,8 +270,8 @@ public async Task MCPToolAlwaysRequiresApproval(bool useGlobalPolicy) options.Input.Add(approvalResponseItem); ResponseResult response2 = await client.CreateResponseAsync(options); - Assert.That(response2.Output, Has.Count.GreaterThan(0)); - Assert.That(response2.Output.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response2.OutputItems, Has.Count.GreaterThan(0)); + Assert.That(response2.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); } [RecordedTest] @@ -297,14 +297,14 @@ public async Task MCPToolWithAllowedTools() } }; - ResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponsesClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); - Assert.That(response.Output, Has.Count.GreaterThan(0)); - Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(1)); - Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(0)); + Assert.That(response.OutputItems, Has.Count.GreaterThan(0)); + Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); - List toolCallItems = response.Output.OfType().ToList(); + List toolCallItems = response.OutputItems.OfType().ToList(); Assert.That(toolCallItems, Has.Count.EqualTo(1)); McpToolCallItem toolCallItem = toolCallItems[0]; @@ -338,13 +338,13 @@ public async Task MCPToolWithDisallowedTools() } }; - ResponseClient client = GetTestClient(overrideModel: "gpt-5"); + ResponsesClient client = GetTestClient(overrideModel: "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); - Assert.That(response.Output, Has.Count.GreaterThan(0)); - Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(1)); - Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(0)); - Assert.That(response.Output.OfType().ToList(), Has.Count.EqualTo(0)); + Assert.That(response.OutputItems, 
Has.Count.GreaterThan(0)); + Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(1)); + Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); + Assert.That(response.OutputItems.OfType().ToList(), Has.Count.EqualTo(0)); } [RecordedTest] @@ -372,7 +372,7 @@ public async Task FileSearch() await Task.Delay(TimeSpan.FromSeconds(5)); } - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( new([ResponseItem.CreateUserMessageItem("Using the file search tool, what's Travis's favorite food?")]) @@ -382,12 +382,12 @@ public async Task FileSearch() ResponseTool.CreateFileSearchTool(vectorStoreIds: [vectorStore.Id]), } }); - Assert.That(response.Output?.Count, Is.EqualTo(2)); - FileSearchCallResponseItem fileSearchCall = response.Output[0] as FileSearchCallResponseItem; + Assert.That(response.OutputItems?.Count, Is.EqualTo(2)); + FileSearchCallResponseItem fileSearchCall = response.OutputItems[0] as FileSearchCallResponseItem; Assert.That(fileSearchCall, Is.Not.Null); Assert.That(fileSearchCall?.Status, Is.EqualTo(FileSearchCallStatus.Completed)); Assert.That(fileSearchCall?.Queries, Has.Count.GreaterThan(0)); - MessageResponseItem message = response.Output[1] as MessageResponseItem; + MessageResponseItem message = response.OutputItems[1] as MessageResponseItem; Assert.That(message, Is.Not.Null); ResponseContentPart messageContentPart = message.Content?.FirstOrDefault(); Assert.That(messageContentPart, Is.Not.Null); @@ -407,7 +407,7 @@ public async Task FileSearch() [RecordedTest] public async Task CodeInterpreterToolWithoutFileIds() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration())); CreateResponseOptions responseOptions = 
new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code.")]) @@ -419,11 +419,11 @@ public async Task CodeInterpreterToolWithoutFileIds() responseOptions); Assert.That(response, Is.Not.Null); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -437,7 +437,7 @@ public async Task CodeInterpreterToolWithoutFileIds() [RecordedTest] public async Task CodeInterpreterToolWithEmptyFileIds() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new(new AutomaticCodeInterpreterToolContainerConfiguration())); CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Generate a simple chart using matplotlib. 
Ensure you emit debug logging and include any resulting log file output.")]) @@ -450,11 +450,11 @@ public async Task CodeInterpreterToolWithEmptyFileIds() Assert.That(response, Is.Not.Null); Assert.That(response, Is.Not.Null); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -469,7 +469,7 @@ public async Task CodeInterpreterToolWithEmptyFileIds() public async Task CodeInterpreterToolWithContainerIdFromContainerApi() { ContainerClient containerClient = GetProxiedOpenAIClient(TestScenario.Containers); - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); // Create a container first using the Containers API CreateContainerBody containerBody = new("test-container-for-code-interpreter"); @@ -492,11 +492,11 @@ public async Task CodeInterpreterToolWithContainerIdFromContainerApi() responseOptions); Assert.That(response, Is.Not.Null); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; 
Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(message.Content[0].Text, Is.Not.Null.And.Not.Empty); @@ -524,7 +524,7 @@ public async Task CodeInterpreterToolWithContainerIdFromContainerApi() public async Task CodeInterpreterToolWithUploadedFileIds() { OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); // Create some test files to upload string csvContent = "name,age,city\nAlice,30,New York\nBob,25,Los Angeles\nCharlie,35,Chicago"; @@ -557,7 +557,7 @@ public async Task CodeInterpreterToolWithUploadedFileIds() responseOptions); Assert.That(response, Is.Not.Null); - Assert.That(response.Output, Is.Not.Null.And.Not.Empty); + Assert.That(response.OutputItems, Is.Not.Null.And.Not.Empty); // Basic validation that the response was created successfully Assert.That(response.Id, Is.Not.Null.And.Not.Empty); @@ -585,7 +585,7 @@ public async Task CodeInterpreterToolWithUploadedFileIds() [RecordedTest] public async Task CodeInterpreterToolStreaming() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(new AutomaticCodeInterpreterToolContainerConfiguration())); CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code and show me the code step by step.")]) @@ -619,7 +619,7 @@ in client.CreateResponseStreamingAsync(responseOptions)) public async Task CodeInterpreterToolStreamingWithFiles() { OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); // Create test CSV data string csvContent = "x,y\n1,2\n2,4\n3,6\n4,8\n5,10"; @@ -682,7 +682,7 @@ in 
client.CreateResponseStreamingAsync(responseOptions)) [RecordedTest] public async Task ImageGenToolWorks() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { @@ -701,17 +701,17 @@ public async Task ImageGenToolWorks() ResponseResult response = await client.CreateResponseAsync(options); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -719,7 +719,7 @@ public async Task ImageGenToolWorks() [RecordedTest] public async Task ImageGenToolStreaming() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); const string message = "Draw a gorgeous image of a river made of white owl feathers, snaking its way through a serene winter landscape"; @@ -802,7 +802,7 @@ in client.CreateResponseStreamingAsync(responseOptions)) [RecordedTest] public async 
Task ImageGenToolInputMaskWithImageBytes() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); string imageFilename = "images_dog_and_cat.png"; string imagePath = Path.Combine("Assets", imageFilename); @@ -819,17 +819,17 @@ public async Task ImageGenToolInputMaskWithImageBytes() ResponseResult response = await client.CreateResponseAsync(options); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -837,7 +837,7 @@ public async Task ImageGenToolInputMaskWithImageBytes() [RecordedTest] public async Task ImageGenToolInputMaskWithImageUri() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) { @@ -852,17 +852,17 @@ public async Task ImageGenToolInputMaskWithImageUri() ResponseResult response = await client.CreateResponseAsync(options); - 
Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = (MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -870,7 +870,7 @@ public async Task ImageGenToolInputMaskWithImageUri() [RecordedTest] public async Task ImageGenToolInputMaskWithFileId() { - ResponseClient client = GetTestClient(); + ResponsesClient client = GetTestClient(); OpenAIFileClient fileClient = GetProxiedOpenAIClient(TestScenario.Files); @@ -902,17 +902,17 @@ public async Task ImageGenToolInputMaskWithFileId() ResponseResult response = await client.CreateResponseAsync(options); - Assert.That(response.Output, Has.Count.EqualTo(2)); - Assert.That(response.Output[0], Is.InstanceOf()); - Assert.That(response.Output[1], Is.InstanceOf()); + Assert.That(response.OutputItems, Has.Count.EqualTo(2)); + Assert.That(response.OutputItems[0], Is.InstanceOf()); + Assert.That(response.OutputItems[1], Is.InstanceOf()); - MessageResponseItem message = (MessageResponseItem)response.Output[1]; + MessageResponseItem message = 
(MessageResponseItem)response.OutputItems[1]; Assert.That(message.Content, Has.Count.GreaterThan(0)); Assert.That(message.Content[0].Kind, Is.EqualTo(ResponseContentPartKind.OutputText)); Assert.That(response.Tools.FirstOrDefault(), Is.TypeOf()); - ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.Output[0]; + ImageGenerationCallResponseItem imageGenResponse = (ImageGenerationCallResponseItem)response.OutputItems[0]; Assert.That(imageGenResponse.Status, Is.EqualTo(ImageGenerationCallStatus.Completed)); Assert.That(imageGenResponse.ImageResultBytes.ToArray(), Is.Not.Null.And.Not.Empty); } @@ -987,5 +987,5 @@ private static void ValidateCodeInterpreterEvent(ref int inProgressCount, ref in } } - private ResponseClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); + private ResponsesClient GetTestClient(string overrideModel = null) => GetProxiedOpenAIClient(TestScenario.Responses, overrideModel); } \ No newline at end of file diff --git a/tests/Utility/TestHelpers.cs b/tests/Utility/TestHelpers.cs index 6f148e5cf..242366a5d 100644 --- a/tests/Utility/TestHelpers.cs +++ b/tests/Utility/TestHelpers.cs @@ -121,7 +121,7 @@ public static T GetTestClient( TestScenario.Realtime => new RealtimeClient(credential, options), #pragma warning restore #pragma warning disable OPENAI003 - TestScenario.Responses => new ResponseClient(model, credential, options), + TestScenario.Responses => new ResponsesClient(model, credential, options), #pragma warning restore _ => throw new NotImplementedException(), }; From 2b9deaadd4b521606c9ee085529d160987de0551 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Wed, 12 Nov 2025 12:59:37 -0600 Subject: [PATCH 11/15] export --- api/OpenAI.net8.0.cs | 228 ++++++++++++------ api/OpenAI.netstandard2.0.cs | 218 +++++++++++------ src/Generated/OpenAIClient.cs | 6 +- ...lient.cs => ResponsesClient.RestClient.cs} | 0 .../{ResponseClient.cs => 
ResponsesClient.cs} | 8 +- ...esponseInputItemsAsyncCollectionResult.cs} | 4 +- ...onseInputItemsAsyncCollectionResultOfT.cs} | 4 +- ...tGetResponseInputItemsCollectionResult.cs} | 4 +- ...tResponseInputItemsCollectionResultOfT.cs} | 4 +- 9 files changed, 315 insertions(+), 161 deletions(-) rename src/Generated/{ResponseClient.RestClient.cs => ResponsesClient.RestClient.cs} (100%) rename src/Generated/{ResponseClient.cs => ResponsesClient.cs} (93%) rename src/Generated/{ResponseClientGetResponseInputItemsAsyncCollectionResult.cs => ResponsesClientGetResponseInputItemsAsyncCollectionResult.cs} (88%) rename src/Generated/{ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs => ResponsesClientGetResponseInputItemsAsyncCollectionResultOfT.cs} (89%) rename src/Generated/{ResponseClientGetResponseInputItemsCollectionResult.cs => ResponsesClientGetResponseInputItemsCollectionResult.cs} (88%) rename src/Generated/{ResponseClientGetResponseInputItemsCollectionResultOfT.cs => ResponsesClientGetResponseInputItemsCollectionResultOfT.cs} (88%) diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs index e97856f47..47e63ef44 100644 --- a/api/OpenAI.net8.0.cs +++ b/api/OpenAI.net8.0.cs @@ -34,10 +34,12 @@ public class OpenAIClient { public virtual OpenAIFileClient GetOpenAIFileClient(); public virtual OpenAIModelClient GetOpenAIModelClient(); [Experimental("OPENAI001")] - public virtual OpenAIResponseClient GetOpenAIResponseClient(string model); + public virtual ResponsesClient GetOpenAIResponseClient(string model); [Experimental("OPENAI002")] public virtual RealtimeClient GetRealtimeClient(); [Experimental("OPENAI001")] + public virtual ResponsesClient GetResponsesClient(); + [Experimental("OPENAI001")] public virtual VectorStoreClient GetVectorStoreClient(); [Experimental("OPENAI001")] public virtual VideoClient GetVideoClient(); @@ -5055,28 +5057,29 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, [Experimental("OPENAI001")] 
public class CreateResponseOptions : IJsonModel, IPersistableModel { public CreateResponseOptions(List input); - public bool? Background { get; set; } + public string EndUserId { get; set; } public IList Include { get; set; } public IList Input { get; } public string Instructions { get; set; } - public int? MaxOutputTokens { get; set; } + public bool? IsBackgroundModeEnabled { get; set; } + public bool? IsParallelToolCallsEnabled { get; set; } + public bool? IsStoredOutputEnabled { get; set; } + public bool? IsStreamingEnabled { get; set; } + public int? MaxOutputTokenCount { get; set; } public IDictionary Metadata { get; } - public bool? ParallelToolCalls { get; set; } + public ModelIdsResponses? Model { get; set; } [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } public string PreviousResponseId { get; set; } - public ResponseReasoningOptions Reasoning { get; set; } + public ResponseReasoningOptions ReasoningOptions { get; set; } public ResponseServiceTier? ServiceTier { get; set; } - public bool? Store { get; set; } - public bool? Stream { get; set; } public float? Temperature { get; set; } - public ResponseTextOptions Text { get; set; } + public ResponseTextOptions TextOptions { get; set; } public ResponseToolChoice ToolChoice { get; set; } public IList Tools { get; } public float? TopP { get; set; } - public ResponseTruncationMode? Truncation { get; set; } - public string User { get; set; } + public ResponseTruncationMode? 
TruncationMode { get; set; } protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions); @@ -5576,45 +5579,79 @@ public enum MessageStatus { Incomplete = 2 } [Experimental("OPENAI001")] - public class OpenAIResponseClient { - protected OpenAIResponseClient(); - protected internal OpenAIResponseClient(ClientPipeline pipeline, string model, OpenAIClientOptions options); - public OpenAIResponseClient(string model, ApiKeyCredential credential, OpenAIClientOptions options); - public OpenAIResponseClient(string model, ApiKeyCredential credential); - public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options); - public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy); - public OpenAIResponseClient(string model, string apiKey); - [Experimental("OPENAI001")] - public virtual Uri Endpoint { get; } - [Experimental("OPENAI001")] - public virtual string Model { get; } - public ClientPipeline Pipeline { get; } - public virtual ClientResult CancelResponse(string responseId, RequestOptions options); - public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default); - public virtual Task CancelResponseAsync(string responseId, RequestOptions options); - public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); - public virtual Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); - public 
virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); - public virtual CollectionResult CreateResponseStreaming(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); - public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); - public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); - public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); - public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); - public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options); - public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseInputItems(string responseId, int? 
limit, string order, string after, string before, RequestOptions options); - public virtual Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); - public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public readonly partial struct ModelIdsResponses : IEquatable { + public ModelIdsResponses(string value); + public static ModelIdsResponses Chatgpt4oLatest { get; } + public static ModelIdsResponses CodexMiniLatest { get; } + public static ModelIdsResponses ComputerUsePreview { get; } + public static ModelIdsResponses ComputerUsePreview20250311 { get; } + public static ModelIdsResponses Gpt35Turbo { get; } + public static ModelIdsResponses Gpt35Turbo0125 { get; } + public static ModelIdsResponses Gpt35Turbo0301 { get; } + public static ModelIdsResponses Gpt35Turbo0613 { get; } + public static ModelIdsResponses Gpt35Turbo1106 { get; } + public static ModelIdsResponses Gpt35Turbo16k { get; } + public static ModelIdsResponses Gpt35Turbo16k0613 { get; } + public static ModelIdsResponses Gpt4 { get; } + public static ModelIdsResponses Gpt40125Preview { get; } + public static ModelIdsResponses Gpt40314 { get; } + public static ModelIdsResponses Gpt40613 { get; } + public static ModelIdsResponses Gpt41 { get; } + public static ModelIdsResponses Gpt41106Preview { get; } + public static ModelIdsResponses Gpt4120250414 { get; } + public static 
ModelIdsResponses Gpt41Mini { get; } + public static ModelIdsResponses Gpt41Mini20250414 { get; } + public static ModelIdsResponses Gpt41Nano { get; } + public static ModelIdsResponses Gpt41Nano20250414 { get; } + public static ModelIdsResponses Gpt432k { get; } + public static ModelIdsResponses Gpt432k0314 { get; } + public static ModelIdsResponses Gpt432k0613 { get; } + public static ModelIdsResponses Gpt4o { get; } + public static ModelIdsResponses Gpt4o20240513 { get; } + public static ModelIdsResponses Gpt4o20240806 { get; } + public static ModelIdsResponses Gpt4o20241120 { get; } + public static ModelIdsResponses Gpt4oAudioPreview { get; } + public static ModelIdsResponses Gpt4oAudioPreview20241001 { get; } + public static ModelIdsResponses Gpt4oAudioPreview20241217 { get; } + public static ModelIdsResponses Gpt4oAudioPreview20250603 { get; } + public static ModelIdsResponses Gpt4oMini { get; } + public static ModelIdsResponses Gpt4oMini20240718 { get; } + public static ModelIdsResponses Gpt4oMiniAudioPreview { get; } + public static ModelIdsResponses Gpt4oMiniAudioPreview20241217 { get; } + public static ModelIdsResponses Gpt4oMiniSearchPreview { get; } + public static ModelIdsResponses Gpt4oMiniSearchPreview20250311 { get; } + public static ModelIdsResponses Gpt4oSearchPreview { get; } + public static ModelIdsResponses Gpt4oSearchPreview20250311 { get; } + public static ModelIdsResponses Gpt4Turbo { get; } + public static ModelIdsResponses Gpt4Turbo20240409 { get; } + public static ModelIdsResponses Gpt4TurboPreview { get; } + public static ModelIdsResponses Gpt4VisionPreview { get; } + public static ModelIdsResponses O1 { get; } + public static ModelIdsResponses O120241217 { get; } + public static ModelIdsResponses O1Mini { get; } + public static ModelIdsResponses O1Mini20240912 { get; } + public static ModelIdsResponses O1Preview { get; } + public static ModelIdsResponses O1Preview20240912 { get; } + public static ModelIdsResponses O1Pro { get; } + public 
static ModelIdsResponses O1Pro20250319 { get; } + public static ModelIdsResponses O3 { get; } + public static ModelIdsResponses O320250416 { get; } + public static ModelIdsResponses O3Mini { get; } + public static ModelIdsResponses O3Mini20250131 { get; } + public static ModelIdsResponses O3Pro { get; } + public static ModelIdsResponses O3Pro20250610 { get; } + public static ModelIdsResponses O4Mini { get; } + public static ModelIdsResponses O4Mini20250416 { get; } + public readonly bool Equals(ModelIdsResponses other); + [EditorBrowsable(EditorBrowsableState.Never)] + public override readonly bool Equals(object obj); + [EditorBrowsable(EditorBrowsableState.Never)] + public override readonly int GetHashCode(); + public static bool operator ==(ModelIdsResponses left, ModelIdsResponses right); + public static implicit operator ModelIdsResponses(string value); + public static implicit operator ModelIdsResponses?(string value); + public static bool operator !=(ModelIdsResponses left, ModelIdsResponses right); + public override readonly string ToString(); } [Experimental("OPENAI001")] public static class OpenAIResponsesModelFactory { @@ -5855,6 +5892,22 @@ public class ResponseItem : IJsonModel, IPersistableModel, IPersistableModel { + public IList Data { get; } + public string FirstId { get; } + public bool HasMore { get; } + public string LastId { get; } + public string Object { get; } + [EditorBrowsable(EditorBrowsableState.Never)] + [Experimental("SCME0001")] + public ref JsonPatch Patch { get; } + protected virtual ResponseItemCollection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); + public static explicit operator ResponseItemCollection(ClientResult result); + protected virtual ResponseItemCollection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); + protected virtual BinaryData 
PersistableModelWriteCore(ModelReaderWriterOptions options); + } + [Experimental("OPENAI001")] public class ResponseItemCollectionOptions : IJsonModel, IPersistableModel { public string AfterId { get; set; } public string BeforeId { get; set; } @@ -5885,22 +5938,6 @@ public class ResponseItemCollectionOptions : IJsonModel, IPersistableModel { - public IList Data { get; } - public string FirstId { get; } - public bool HasMore { get; } - public string LastId { get; } - public string Object { get; } - [EditorBrowsable(EditorBrowsableState.Never)] - [Experimental("SCME0001")] - public ref JsonPatch Patch { get; } - protected virtual ResponseItemList JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); - public static explicit operator ResponseItemList(ClientResult result); - protected virtual ResponseItemList PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); - } - [Experimental("OPENAI001")] public class ResponseMessageAnnotation : IJsonModel, IPersistableModel { public ResponseMessageAnnotationKind Kind { get; } [EditorBrowsable(EditorBrowsableState.Never)] @@ -5978,34 +6015,34 @@ public class ResponseReasoningOptions : IJsonModel, IP } [Experimental("OPENAI001")] public class ResponseResult : IJsonModel, IPersistableModel { - public bool? Background { get; } public DateTimeOffset CreatedAt { get; } + public string EndUserId { get; } public ResponseError Error { get; } public string Id { get; } - public ResponseIncompleteStatusDetails IncompleteDetails { get; } + public ResponseIncompleteStatusDetails IncompleteStatusDetails { get; } public string Instructions { get; } - public int? MaxOutputTokens { get; } + public ModelIdsResponses? InternalModel { get; } + public bool? 
IsBackgroundModeEnabled { get; } + public bool IsParallelToolCallsEnabled { get; } + public int? MaxOutputTokenCount { get; } public IDictionary Metadata { get; } public string Model { get; } public string Object { get; } - public IList Output { get; } - public string OutputText { get; } - public bool ParallelToolCalls { get; } + public IList OutputItems { get; } [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } public string PreviousResponseId { get; } - public ResponseReasoningOptions Reasoning { get; } + public ResponseReasoningOptions ReasoningOptions { get; } public ResponseServiceTier? ServiceTier { get; } public ResponseStatus? Status { get; } public float? Temperature { get; } - public ResponseTextOptions Text { get; } + public ResponseTextOptions TextOptions { get; } public ResponseToolChoice ToolChoice { get; } public IList Tools { get; } public float? TopP { get; } - public ResponseTruncationMode? Truncation { get; } + public ResponseTruncationMode? 
TruncationMode { get; } public ResponseTokenUsage Usage { get; } - public string User { get; } public string GetOutputText(); protected virtual ResponseResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); @@ -6015,6 +6052,47 @@ public class ResponseResult : IJsonModel, IPersistableModel CancelResponse(string responseId, CancellationToken cancellationToken = default); + public virtual Task CancelResponseAsync(string responseId, RequestOptions options); + public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default); + public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); + public virtual Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); + public virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); + public virtual CollectionResult CreateResponseStreaming(CreateResponseOptions options, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); + public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); + public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); + public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); + public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponse(string 
responseId, bool? stream, int? startingAfter, RequestOptions options); + public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options); + public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); + public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); + public virtual CollectionResult GetResponseInputItems(string responseId, int? limit, string order, string after, string before, RequestOptions options); + public virtual Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? 
limit, string order, string after, string before, RequestOptions options); + public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + } + [Experimental("OPENAI001")] public readonly partial struct ResponseServiceTier : IEquatable { public ResponseServiceTier(string value); public static ResponseServiceTier Auto { get; } diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs index b499b8175..c44ad7f18 100644 --- a/api/OpenAI.netstandard2.0.cs +++ b/api/OpenAI.netstandard2.0.cs @@ -23,8 +23,9 @@ public class OpenAIClient { public virtual ModerationClient GetModerationClient(string model); public virtual OpenAIFileClient GetOpenAIFileClient(); public virtual OpenAIModelClient GetOpenAIModelClient(); - public virtual OpenAIResponseClient GetOpenAIResponseClient(string model); + public virtual ResponsesClient GetOpenAIResponseClient(string model); public virtual RealtimeClient GetRealtimeClient(); + public virtual ResponsesClient GetResponsesClient(); public virtual VectorStoreClient GetVectorStoreClient(); public virtual VideoClient GetVideoClient(); } @@ -4416,27 +4417,28 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, } public class CreateResponseOptions : IJsonModel, IPersistableModel { public CreateResponseOptions(List input); - public bool? Background { get; set; } + public string EndUserId { get; set; } public IList Include { get; set; } public IList Input { get; } public string Instructions { get; set; } - public int? MaxOutputTokens { get; set; } + public bool? IsBackgroundModeEnabled { get; set; } + public bool? IsParallelToolCallsEnabled { get; set; } + public bool? IsStoredOutputEnabled { get; set; } + public bool? IsStreamingEnabled { get; set; } + public int? 
MaxOutputTokenCount { get; set; } public IDictionary Metadata { get; } - public bool? ParallelToolCalls { get; set; } + public ModelIdsResponses? Model { get; set; } [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public string PreviousResponseId { get; set; } - public ResponseReasoningOptions Reasoning { get; set; } + public ResponseReasoningOptions ReasoningOptions { get; set; } public ResponseServiceTier? ServiceTier { get; set; } - public bool? Store { get; set; } - public bool? Stream { get; set; } public float? Temperature { get; set; } - public ResponseTextOptions Text { get; set; } + public ResponseTextOptions TextOptions { get; set; } public ResponseToolChoice ToolChoice { get; set; } public IList Tools { get; } public float? TopP { get; set; } - public ResponseTruncationMode? Truncation { get; set; } - public string User { get; set; } + public ResponseTruncationMode? TruncationMode { get; set; } protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions); @@ -4890,43 +4892,79 @@ public enum MessageStatus { Completed = 1, Incomplete = 2 } - public class OpenAIResponseClient { - protected OpenAIResponseClient(); - protected internal OpenAIResponseClient(ClientPipeline pipeline, string model, OpenAIClientOptions options); - public OpenAIResponseClient(string model, ApiKeyCredential credential, OpenAIClientOptions options); - public OpenAIResponseClient(string model, ApiKeyCredential credential); - public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options); - public OpenAIResponseClient(string model, AuthenticationPolicy authenticationPolicy); - public OpenAIResponseClient(string model, string apiKey); - public virtual Uri 
Endpoint { get; } - public virtual string Model { get; } - public ClientPipeline Pipeline { get; } - public virtual ClientResult CancelResponse(string responseId, RequestOptions options); - public virtual ClientResult CancelResponse(string responseId, CancellationToken cancellationToken = default); - public virtual Task CancelResponseAsync(string responseId, RequestOptions options); - public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); - public virtual Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); - public virtual CollectionResult CreateResponseStreaming(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); - public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); - public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); - public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); - public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); - public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual ClientResult GetResponse(string responseId, bool? stream, int? 
startingAfter, RequestOptions options); - public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options); - public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseInputItems(string responseId, int? limit, string order, string after, string before, RequestOptions options); - public virtual Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? 
limit, string order, string after, string before, RequestOptions options); - public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public readonly partial struct ModelIdsResponses : IEquatable { + public ModelIdsResponses(string value); + public static ModelIdsResponses Chatgpt4oLatest { get; } + public static ModelIdsResponses CodexMiniLatest { get; } + public static ModelIdsResponses ComputerUsePreview { get; } + public static ModelIdsResponses ComputerUsePreview20250311 { get; } + public static ModelIdsResponses Gpt35Turbo { get; } + public static ModelIdsResponses Gpt35Turbo0125 { get; } + public static ModelIdsResponses Gpt35Turbo0301 { get; } + public static ModelIdsResponses Gpt35Turbo0613 { get; } + public static ModelIdsResponses Gpt35Turbo1106 { get; } + public static ModelIdsResponses Gpt35Turbo16k { get; } + public static ModelIdsResponses Gpt35Turbo16k0613 { get; } + public static ModelIdsResponses Gpt4 { get; } + public static ModelIdsResponses Gpt40125Preview { get; } + public static ModelIdsResponses Gpt40314 { get; } + public static ModelIdsResponses Gpt40613 { get; } + public static ModelIdsResponses Gpt41 { get; } + public static ModelIdsResponses Gpt41106Preview { get; } + public static ModelIdsResponses Gpt4120250414 { get; } + public static ModelIdsResponses Gpt41Mini { get; } + public static ModelIdsResponses Gpt41Mini20250414 { get; } + public static ModelIdsResponses Gpt41Nano { get; } + public static ModelIdsResponses Gpt41Nano20250414 { get; } + public static ModelIdsResponses Gpt432k { get; } + public static ModelIdsResponses Gpt432k0314 { get; } + public static ModelIdsResponses Gpt432k0613 { get; } + public static ModelIdsResponses Gpt4o { get; } + public static ModelIdsResponses Gpt4o20240513 { get; } + public static 
ModelIdsResponses Gpt4o20240806 { get; } + public static ModelIdsResponses Gpt4o20241120 { get; } + public static ModelIdsResponses Gpt4oAudioPreview { get; } + public static ModelIdsResponses Gpt4oAudioPreview20241001 { get; } + public static ModelIdsResponses Gpt4oAudioPreview20241217 { get; } + public static ModelIdsResponses Gpt4oAudioPreview20250603 { get; } + public static ModelIdsResponses Gpt4oMini { get; } + public static ModelIdsResponses Gpt4oMini20240718 { get; } + public static ModelIdsResponses Gpt4oMiniAudioPreview { get; } + public static ModelIdsResponses Gpt4oMiniAudioPreview20241217 { get; } + public static ModelIdsResponses Gpt4oMiniSearchPreview { get; } + public static ModelIdsResponses Gpt4oMiniSearchPreview20250311 { get; } + public static ModelIdsResponses Gpt4oSearchPreview { get; } + public static ModelIdsResponses Gpt4oSearchPreview20250311 { get; } + public static ModelIdsResponses Gpt4Turbo { get; } + public static ModelIdsResponses Gpt4Turbo20240409 { get; } + public static ModelIdsResponses Gpt4TurboPreview { get; } + public static ModelIdsResponses Gpt4VisionPreview { get; } + public static ModelIdsResponses O1 { get; } + public static ModelIdsResponses O120241217 { get; } + public static ModelIdsResponses O1Mini { get; } + public static ModelIdsResponses O1Mini20240912 { get; } + public static ModelIdsResponses O1Preview { get; } + public static ModelIdsResponses O1Preview20240912 { get; } + public static ModelIdsResponses O1Pro { get; } + public static ModelIdsResponses O1Pro20250319 { get; } + public static ModelIdsResponses O3 { get; } + public static ModelIdsResponses O320250416 { get; } + public static ModelIdsResponses O3Mini { get; } + public static ModelIdsResponses O3Mini20250131 { get; } + public static ModelIdsResponses O3Pro { get; } + public static ModelIdsResponses O3Pro20250610 { get; } + public static ModelIdsResponses O4Mini { get; } + public static ModelIdsResponses O4Mini20250416 { get; } + public readonly bool 
Equals(ModelIdsResponses other); + [EditorBrowsable(EditorBrowsableState.Never)] + public override readonly bool Equals(object obj); + [EditorBrowsable(EditorBrowsableState.Never)] + public override readonly int GetHashCode(); + public static bool operator ==(ModelIdsResponses left, ModelIdsResponses right); + public static implicit operator ModelIdsResponses(string value); + public static implicit operator ModelIdsResponses?(string value); + public static bool operator !=(ModelIdsResponses left, ModelIdsResponses right); + public override readonly string ToString(); } public static class OpenAIResponsesModelFactory { public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null); @@ -5141,6 +5179,20 @@ public class ResponseItem : IJsonModel, IPersistableModel, IPersistableModel { + public IList Data { get; } + public string FirstId { get; } + public bool HasMore { get; } + public string LastId { get; } + public string Object { get; } + [EditorBrowsable(EditorBrowsableState.Never)] + public ref JsonPatch Patch { get; } + protected virtual ResponseItemCollection JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); + public static explicit operator ResponseItemCollection(ClientResult result); + protected virtual ResponseItemCollection PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); + } public class ResponseItemCollectionOptions : IJsonModel, IPersistableModel { public string AfterId { get; set; } public string BeforeId { get; set; } @@ -5168,20 +5220,6 @@ public class ResponseItemCollectionOptions : IJsonModel, IPersistableModel { - public IList Data { get; } - public string FirstId { get; } - public bool HasMore { get; } - public string 
LastId { get; } - public string Object { get; } - [EditorBrowsable(EditorBrowsableState.Never)] - public ref JsonPatch Patch { get; } - protected virtual ResponseItemList JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); - public static explicit operator ResponseItemList(ClientResult result); - protected virtual ResponseItemList PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); - } public class ResponseMessageAnnotation : IJsonModel, IPersistableModel { public ResponseMessageAnnotationKind Kind { get; } [EditorBrowsable(EditorBrowsableState.Never)] @@ -5250,33 +5288,33 @@ public class ResponseReasoningOptions : IJsonModel, IP public override readonly string ToString(); } public class ResponseResult : IJsonModel, IPersistableModel { - public bool? Background { get; } public DateTimeOffset CreatedAt { get; } + public string EndUserId { get; } public ResponseError Error { get; } public string Id { get; } - public ResponseIncompleteStatusDetails IncompleteDetails { get; } + public ResponseIncompleteStatusDetails IncompleteStatusDetails { get; } public string Instructions { get; } - public int? MaxOutputTokens { get; } + public ModelIdsResponses? InternalModel { get; } + public bool? IsBackgroundModeEnabled { get; } + public bool IsParallelToolCallsEnabled { get; } + public int? 
MaxOutputTokenCount { get; } public IDictionary Metadata { get; } public string Model { get; } public string Object { get; } - public IList Output { get; } - public string OutputText { get; } - public bool ParallelToolCalls { get; } + public IList OutputItems { get; } [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public string PreviousResponseId { get; } - public ResponseReasoningOptions Reasoning { get; } + public ResponseReasoningOptions ReasoningOptions { get; } public ResponseServiceTier? ServiceTier { get; } public ResponseStatus? Status { get; } public float? Temperature { get; } - public ResponseTextOptions Text { get; } + public ResponseTextOptions TextOptions { get; } public ResponseToolChoice ToolChoice { get; } public IList Tools { get; } public float? TopP { get; } - public ResponseTruncationMode? Truncation { get; } + public ResponseTruncationMode? TruncationMode { get; } public ResponseTokenUsage Usage { get; } - public string User { get; } public string GetOutputText(); protected virtual ResponseResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); @@ -5285,6 +5323,44 @@ public class ResponseResult : IJsonModel, IPersistableModel CancelResponse(string responseId, CancellationToken cancellationToken = default); + public virtual Task CancelResponseAsync(string responseId, RequestOptions options); + public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult CreateResponse(CreateResponseOptions options, CancellationToken cancellationToken = default); + public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); + public virtual Task> CreateResponseAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); + public virtual Task 
CreateResponseAsync(BinaryContent content, RequestOptions options = null); + public virtual CollectionResult CreateResponseStreaming(CreateResponseOptions options, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult CreateResponseStreamingAsync(CreateResponseOptions options, CancellationToken cancellationToken = default); + public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); + public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); + public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); + public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); + public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options); + public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); + public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); + public virtual CollectionResult GetResponseInputItems(string responseId, int? 
limit, string order, string after, string before, RequestOptions options); + public virtual Task> GetResponseInputItemsAsync(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); + public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + } public readonly partial struct ResponseServiceTier : IEquatable { public ResponseServiceTier(string value); public static ResponseServiceTier Auto { get; } diff --git a/src/Generated/OpenAIClient.cs b/src/Generated/OpenAIClient.cs index 29f3b1ee1..c9f1a8883 100644 --- a/src/Generated/OpenAIClient.cs +++ b/src/Generated/OpenAIClient.cs @@ -17,7 +17,7 @@ public partial class OpenAIClient private readonly ApiKeyCredential _keyCredential; private const string AuthorizationHeader = "Authorization"; private const string AuthorizationApiKeyPrefix = "Bearer"; - private ResponsesClient _cachedResponseClient; + private ResponsesClient _cachedResponsesClient; protected OpenAIClient() { @@ -26,9 +26,9 @@ protected OpenAIClient() public ClientPipeline Pipeline { get; } [Experimental("OPENAI001")] - public virtual ResponsesClient GetResponseClient() + public virtual ResponsesClient GetResponsesClient() { - return Volatile.Read(ref _cachedResponseClient) ?? Interlocked.CompareExchange(ref _cachedResponseClient, new ResponsesClient(Pipeline, _endpoint), null) ?? _cachedResponseClient; + return Volatile.Read(ref _cachedResponsesClient) ?? 
Interlocked.CompareExchange(ref _cachedResponsesClient, new ResponsesClient(Pipeline, _endpoint), null) ?? _cachedResponsesClient; } } } diff --git a/src/Generated/ResponseClient.RestClient.cs b/src/Generated/ResponsesClient.RestClient.cs similarity index 100% rename from src/Generated/ResponseClient.RestClient.cs rename to src/Generated/ResponsesClient.RestClient.cs diff --git a/src/Generated/ResponseClient.cs b/src/Generated/ResponsesClient.cs similarity index 93% rename from src/Generated/ResponseClient.cs rename to src/Generated/ResponsesClient.cs index 9f9489138..a7558dd9a 100644 --- a/src/Generated/ResponseClient.cs +++ b/src/Generated/ResponsesClient.cs @@ -81,7 +81,7 @@ public virtual CollectionResult GetResponseInputItems(string responseId, int? li { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - return new ResponseClientGetResponseInputItemsCollectionResult( + return new ResponsesClientGetResponseInputItemsCollectionResult( this, responseId, limit, @@ -95,7 +95,7 @@ public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseI { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - return new ResponseClientGetResponseInputItemsAsyncCollectionResult( + return new ResponsesClientGetResponseInputItemsAsyncCollectionResult( this, responseId, limit, @@ -109,7 +109,7 @@ public virtual CollectionResult GetResponseInputItems(string respo { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - return new ResponseClientGetResponseInputItemsCollectionResultOfT( + return new ResponsesClientGetResponseInputItemsCollectionResultOfT( this, responseId, options?.PageSizeLimit, @@ -123,7 +123,7 @@ public virtual AsyncCollectionResult GetResponseInputItemsAsync(st { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - return new ResponseClientGetResponseInputItemsAsyncCollectionResultOfT( + return new ResponsesClientGetResponseInputItemsAsyncCollectionResultOfT( this, responseId, options?.PageSizeLimit, 
diff --git a/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs b/src/Generated/ResponsesClientGetResponseInputItemsAsyncCollectionResult.cs similarity index 88% rename from src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs rename to src/Generated/ResponsesClientGetResponseInputItemsAsyncCollectionResult.cs index 91d8196a5..8116baa51 100644 --- a/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResult.cs +++ b/src/Generated/ResponsesClientGetResponseInputItemsAsyncCollectionResult.cs @@ -10,7 +10,7 @@ namespace OpenAI.Responses { - internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResult : AsyncCollectionResult + internal partial class ResponsesClientGetResponseInputItemsAsyncCollectionResult : AsyncCollectionResult { private readonly ResponsesClient _client; private readonly string _responseId; @@ -20,7 +20,7 @@ internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResult private readonly string _before; private readonly RequestOptions _options; - public ResponseClientGetResponseInputItemsAsyncCollectionResult(ResponsesClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponsesClientGetResponseInputItemsAsyncCollectionResult(ResponsesClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs b/src/Generated/ResponsesClientGetResponseInputItemsAsyncCollectionResultOfT.cs similarity index 89% rename from src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs rename to src/Generated/ResponsesClientGetResponseInputItemsAsyncCollectionResultOfT.cs index c07dadeee..3968475dd 100644 --- a/src/Generated/ResponseClientGetResponseInputItemsAsyncCollectionResultOfT.cs +++ b/src/Generated/ResponsesClientGetResponseInputItemsAsyncCollectionResultOfT.cs @@ -11,7 +11,7 @@ namespace OpenAI.Responses { - internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResultOfT : AsyncCollectionResult + internal partial class ResponsesClientGetResponseInputItemsAsyncCollectionResultOfT : AsyncCollectionResult { private readonly ResponsesClient _client; private readonly string _responseId; @@ -21,7 +21,7 @@ internal partial class ResponseClientGetResponseInputItemsAsyncCollectionResultO private readonly string _before; private readonly RequestOptions _options; - public ResponseClientGetResponseInputItemsAsyncCollectionResultOfT(ResponsesClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponsesClientGetResponseInputItemsAsyncCollectionResultOfT(ResponsesClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs b/src/Generated/ResponsesClientGetResponseInputItemsCollectionResult.cs similarity index 88% rename from src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs rename to src/Generated/ResponsesClientGetResponseInputItemsCollectionResult.cs index 2ab6961d1..704dd5dbb 100644 --- a/src/Generated/ResponseClientGetResponseInputItemsCollectionResult.cs +++ b/src/Generated/ResponsesClientGetResponseInputItemsCollectionResult.cs @@ -10,7 +10,7 @@ namespace OpenAI.Responses { - internal partial class ResponseClientGetResponseInputItemsCollectionResult : CollectionResult + internal partial class ResponsesClientGetResponseInputItemsCollectionResult : CollectionResult { private readonly ResponsesClient _client; private readonly string _responseId; @@ -20,7 +20,7 @@ internal partial class ResponseClientGetResponseInputItemsCollectionResult : Col private readonly string _before; private readonly RequestOptions _options; - public ResponseClientGetResponseInputItemsCollectionResult(ResponsesClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponsesClientGetResponseInputItemsCollectionResult(ResponsesClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); diff --git a/src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs b/src/Generated/ResponsesClientGetResponseInputItemsCollectionResultOfT.cs similarity index 88% rename from src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs rename to src/Generated/ResponsesClientGetResponseInputItemsCollectionResultOfT.cs index 562f740fc..16f4d2ce7 100644 --- a/src/Generated/ResponseClientGetResponseInputItemsCollectionResultOfT.cs +++ b/src/Generated/ResponsesClientGetResponseInputItemsCollectionResultOfT.cs @@ -10,7 +10,7 @@ namespace OpenAI.Responses { - internal partial class ResponseClientGetResponseInputItemsCollectionResultOfT : CollectionResult + internal partial class ResponsesClientGetResponseInputItemsCollectionResultOfT : CollectionResult { private readonly ResponsesClient _client; private readonly string _responseId; @@ -20,7 +20,7 @@ internal partial class ResponseClientGetResponseInputItemsCollectionResultOfT : private readonly string _before; private readonly RequestOptions _options; - public ResponseClientGetResponseInputItemsCollectionResultOfT(ResponsesClient client, string responseId, int? limit, string order, string after, string before, RequestOptions options) + public ResponsesClientGetResponseInputItemsCollectionResultOfT(ResponsesClient client, string responseId, int? 
limit, string order, string after, string before, RequestOptions options) { Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); From 24c52bb0607b82c365560735d032e4e2f6f4d35e Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Thu, 13 Nov 2025 10:41:11 -0600 Subject: [PATCH 12/15] fb --- api/OpenAI.net8.0.cs | 69 ++++++++++++++++++++++++++++++++++-- api/OpenAI.netstandard2.0.cs | 67 +++++++++++++++++++++++++++++++++- src/Custom/OpenAIClient.cs | 2 +- 3 files changed, 134 insertions(+), 4 deletions(-) diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs index 47e63ef44..5dbaa5c0e 100644 --- a/api/OpenAI.net8.0.cs +++ b/api/OpenAI.net8.0.cs @@ -33,13 +33,13 @@ public class OpenAIClient { public virtual ModerationClient GetModerationClient(string model); public virtual OpenAIFileClient GetOpenAIFileClient(); public virtual OpenAIModelClient GetOpenAIModelClient(); - [Experimental("OPENAI001")] - public virtual ResponsesClient GetOpenAIResponseClient(string model); [Experimental("OPENAI002")] public virtual RealtimeClient GetRealtimeClient(); [Experimental("OPENAI001")] public virtual ResponsesClient GetResponsesClient(); [Experimental("OPENAI001")] + public virtual ResponsesClient GetResponsesClient(string model); + [Experimental("OPENAI001")] public virtual VectorStoreClient GetVectorStoreClient(); [Experimental("OPENAI001")] public virtual VideoClient GetVideoClient(); @@ -1493,6 +1493,7 @@ public class ChatAudioOptions : IJsonModel, IPersistableModel< public ChatAudioOptions(ChatOutputAudioVoice outputAudioVoice, ChatOutputAudioFormat outputAudioFormat); public ChatOutputAudioFormat OutputAudioFormat { get; } public ChatOutputAudioVoice OutputAudioVoice { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1580,6 +1581,7 @@ public class ChatCompletion : IJsonModel, IPersistableModel, IPersistableModel { public string ChatCompletionId { get; } public 
bool Deleted { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1651,6 +1655,7 @@ public class ChatCompletionMessageCollectionOptions : IJsonModel ContentParts { get; } public string Id { get; } public ChatOutputAudio OutputAudio { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1708,6 +1714,7 @@ public class ChatCompletionOptions : IJsonModel, IPersist public IDictionary Metadata { get; } [Experimental("OPENAI001")] public ChatOutputPrediction OutputPrediction { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1752,6 +1759,7 @@ public class ChatFunction : IJsonModel, IPersistableModel, IPersistableModel< public ChatFunctionCall(string functionName, BinaryData functionArguments); public BinaryData FunctionArguments { get; } public string FunctionName { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1783,6 +1792,7 @@ public class ChatFunctionCall : IJsonModel, IPersistableModel< } [Obsolete("This class is obsolete. 
Please use ChatToolChoice instead.")] public class ChatFunctionChoice : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1833,6 +1843,7 @@ public class ChatFunctionChoice : IJsonModel, IPersistableMo public class ChatInputTokenUsageDetails : IJsonModel, IPersistableModel { public int AudioTokenCount { get; } public int CachedTokenCount { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1847,6 +1858,7 @@ public class ChatInputTokenUsageDetails : IJsonModel } public class ChatMessage : IJsonModel, IPersistableModel { public ChatMessageContent Content { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1888,6 +1900,7 @@ public class ChatMessage : IJsonModel, IPersistableModel, IPersistableModel { public int EndIndex { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1923,6 +1936,7 @@ public class ChatMessageContentPart : IJsonModel, IPersi [Experimental("OPENAI001")] public ChatInputAudioFormat? 
InputAudioFormat { get; } public ChatMessageContentPartKind Kind { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -1968,6 +1982,7 @@ public class ChatOutputAudio : IJsonModel, IPersistableModel, IPersistableModel, IPersistableModel { public ChatOutputAudioReference(string id); public string Id { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2036,6 +2052,7 @@ public class ChatOutputAudioReference : IJsonModel, IP } [Experimental("OPENAI001")] public class ChatOutputPrediction : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2050,6 +2067,7 @@ public class ChatOutputTokenUsageDetails : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2126,6 +2145,7 @@ public enum ChatResponseModalities { } public class ChatTokenLogProbabilityDetails : IJsonModel, IPersistableModel { public float LogProbability { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2143,6 +2163,7 @@ public class ChatTokenLogProbabilityDetails : IJsonModel, IPersistableModel { public float LogProbability { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2162,6 +2183,7 @@ public class ChatTokenUsage : IJsonModel, IPersistableModel, IPersistableModel { public BinaryData FunctionParameters { get; } public bool? 
FunctionSchemaIsStrict { get; } public ChatToolKind Kind { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2199,6 +2222,7 @@ public class ChatToolCall : IJsonModel, IPersistableModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2237,6 +2262,7 @@ public enum ChatToolKind { } [Experimental("OPENAI001")] public class ChatWebSearchOptions : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2313,6 +2339,7 @@ public class StreamingChatCompletionUpdate : IJsonModel, IPersistableModel { public BinaryData FunctionArgumentsUpdate { get; } public string FunctionName { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2354,6 +2382,7 @@ public class StreamingChatOutputAudioUpdate : IJsonModel, IPersistableModel { public int? 
Dimensions { get; set; } public string EndUserId { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2695,6 +2726,7 @@ public class EmbeddingGenerationOptions : IJsonModel } public class EmbeddingTokenUsage : IJsonModel, IPersistableModel { public int InputTokenCount { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2710,6 +2742,7 @@ public class EmbeddingTokenUsage : IJsonModel, IPersistable } public class OpenAIEmbedding : IJsonModel, IPersistableModel { public int Index { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -2725,6 +2758,7 @@ public class OpenAIEmbedding : IJsonModel, IPersistableModel, IJsonModel, IPersistableModel { public string Model { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -4836,6 +4870,7 @@ public class CodeInterpreterCallLogsOutput : CodeInterpreterCallOutput, IJsonMod } [Experimental("OPENAI001")] public class CodeInterpreterCallOutput : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -4879,6 +4914,7 @@ public class CodeInterpreterToolContainer : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -4907,6 +4944,7 @@ public class ComputerCallAction : IJsonModel, IPersistableMo public IList KeyPressKeyCodes { get; } public ComputerCallActionKind Kind { get; } public Drawing.Point? 
MoveCoordinates { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -4950,6 +4988,7 @@ public enum ComputerCallActionMouseButton { } [Experimental("OPENAICUA001")] public class ComputerCallOutput : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -4997,6 +5036,7 @@ public class ComputerCallSafetyCheck : IJsonModel, IPer public string Code { get; set; } public string Id { get; set; } public string Message { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5088,6 +5128,7 @@ public class CreateResponseOptions : IJsonModel, IPersist } [Experimental("OPENAI001")] public class CustomMcpToolCallApprovalPolicy : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5135,6 +5176,7 @@ public class FileSearchCallResult : IJsonModel, IPersistab public IDictionary Attributes { get; } public string FileId { get; set; } public string Filename { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5183,6 +5225,7 @@ public class FileSearchTool : ResponseTool, IJsonModel, IPersist } [Experimental("OPENAI001")] public class FileSearchToolRankingOptions : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5345,6 +5388,7 @@ public class ImageGenerationToolInputImageMask : IJsonModel, public McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy globalPolicy); public CustomMcpToolCallApprovalPolicy CustomPolicy { get; } 
public GlobalMcpToolCallApprovalPolicy? GlobalPolicy { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5523,6 +5568,7 @@ public class McpToolDefinition : IJsonModel, IPersistableMode public string Description { get; set; } public BinaryData InputSchema { get; set; } public string Name { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5545,6 +5591,7 @@ public class McpToolDefinitionListItem : ResponseItem, IJsonModel, IPersistableModel { public bool? IsReadOnly { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5681,6 +5728,7 @@ public enum ReasoningStatus { } [Experimental("OPENAI001")] public class ReasoningSummaryPart : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5717,6 +5765,7 @@ public class ResponseContentPart : IJsonModel, IPersistable public string InputImageFileId { get; } public ResponseContentPartKind Kind { get; } public IReadOnlyList OutputTextAnnotations { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5748,6 +5797,7 @@ public enum ResponseContentPartKind { public class ResponseDeletionResult : IJsonModel, IPersistableModel { public bool Deleted { get; } public string Id { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5761,6 +5811,7 @@ public class ResponseDeletionResult : IJsonModel, IPersi public class ResponseError : IJsonModel, IPersistableModel { public ResponseErrorCode Code { get; } public string Message { get; 
} + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5820,6 +5871,7 @@ public class ResponseError : IJsonModel, IPersistableModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5848,6 +5900,7 @@ public class ResponseIncompleteStatusDetails : IJsonModel, IPersistableModel { public int CachedTokenCount { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5859,6 +5912,7 @@ public class ResponseInputTokenUsageDetails : IJsonModel, IPersistableModel { public string Id { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5913,6 +5967,7 @@ public class ResponseItemCollectionOptions : IJsonModel, IPersistableModel { public ResponseMessageAnnotationKind Kind { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5957,6 +6013,7 @@ public enum ResponseMessageAnnotationKind { } [Experimental("OPENAI001")] public class ResponseOutputTokenUsageDetails : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5986,6 +6043,7 @@ public class ResponseOutputTokenUsageDetails : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -6122,6 +6180,7 @@ public enum ResponseStatus { [Experimental("OPENAI001")] public class ResponseTextFormat : IJsonModel, IPersistableModel { public ResponseTextFormatKind Kind { get; set; } + [Serialization.JsonIgnore] 
[EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -6142,6 +6201,7 @@ public enum ResponseTextFormatKind { } [Experimental("OPENAI001")] public class ResponseTextOptions : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -6157,6 +6217,7 @@ public class ResponseTokenUsage : IJsonModel, IPersistableMo public ResponseInputTokenUsageDetails InputTokenDetails { get; } public int OutputTokenCount { get; } public ResponseOutputTokenUsageDetails OutputTokenDetails { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -6168,6 +6229,7 @@ public class ResponseTokenUsage : IJsonModel, IPersistableMo } [Experimental("OPENAI001")] public class ResponseTool : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -6654,6 +6716,7 @@ public class StreamingResponseTextAnnotationAddedUpdate : StreamingResponseUpdat } [Experimental("OPENAI001")] public class StreamingResponseUpdate : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -6771,6 +6834,7 @@ public class WebSearchToolApproximateLocation : WebSearchToolLocation, IJsonMode [Experimental("OPENAI001")] public class WebSearchToolFilters : IJsonModel, IPersistableModel { public IList AllowedDomains { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -6781,6 +6845,7 @@ public class WebSearchToolFilters : IJsonModel, IPersistab } [Experimental("OPENAI001")] public class WebSearchToolLocation : IJsonModel, 
IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs index c44ad7f18..e048d2b42 100644 --- a/api/OpenAI.netstandard2.0.cs +++ b/api/OpenAI.netstandard2.0.cs @@ -23,9 +23,9 @@ public class OpenAIClient { public virtual ModerationClient GetModerationClient(string model); public virtual OpenAIFileClient GetOpenAIFileClient(); public virtual OpenAIModelClient GetOpenAIModelClient(); - public virtual ResponsesClient GetOpenAIResponseClient(string model); public virtual RealtimeClient GetRealtimeClient(); public virtual ResponsesClient GetResponsesClient(); + public virtual ResponsesClient GetResponsesClient(string model); public virtual VectorStoreClient GetVectorStoreClient(); public virtual VideoClient GetVideoClient(); } @@ -1340,6 +1340,7 @@ public class ChatAudioOptions : IJsonModel, IPersistableModel< public ChatAudioOptions(ChatOutputAudioVoice outputAudioVoice, ChatOutputAudioFormat outputAudioFormat); public ChatOutputAudioFormat OutputAudioFormat { get; } public ChatOutputAudioVoice OutputAudioVoice { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ChatAudioOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -1400,6 +1401,7 @@ public class ChatCompletion : IJsonModel, IPersistableModel, IPersistableModel { public string ChatCompletionId { get; } public bool Deleted { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ChatCompletionDeletionResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -1458,6 +1462,7 @@ public class ChatCompletionMessageCollectionOptions : IJsonModel ContentParts { get; } public string Id { get; } public 
ChatOutputAudio OutputAudio { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public string Refusal { get; } @@ -1509,6 +1515,7 @@ public class ChatCompletionOptions : IJsonModel, IPersist public int? MaxOutputTokenCount { get; set; } public IDictionary Metadata { get; } public ChatOutputPrediction OutputPrediction { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public float? PresencePenalty { get; set; } @@ -1543,6 +1550,7 @@ public class ChatFunction : IJsonModel, IPersistableModel, IPersistableModel< public ChatFunctionCall(string functionName, BinaryData functionArguments); public BinaryData FunctionArguments { get; } public string FunctionName { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ChatFunctionCall JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -1564,6 +1573,7 @@ public class ChatFunctionCall : IJsonModel, IPersistableModel< } [Obsolete("This class is obsolete. 
Please use ChatToolChoice instead.")] public class ChatFunctionChoice : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static ChatFunctionChoice CreateAutoChoice(); @@ -1608,6 +1618,7 @@ public class ChatFunctionChoice : IJsonModel, IPersistableMo public class ChatInputTokenUsageDetails : IJsonModel, IPersistableModel { public int AudioTokenCount { get; } public int CachedTokenCount { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ChatInputTokenUsageDetails JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -1617,6 +1628,7 @@ public class ChatInputTokenUsageDetails : IJsonModel } public class ChatMessage : IJsonModel, IPersistableModel { public ChatMessageContent Content { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static AssistantChatMessage CreateAssistantMessage(ChatCompletion chatCompletion); @@ -1648,6 +1660,7 @@ public class ChatMessage : IJsonModel, IPersistableModel, IPersistableModel { public int EndIndex { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public int StartIndex { get; } @@ -1676,6 +1689,7 @@ public class ChatMessageContentPart : IJsonModel, IPersi public BinaryData InputAudioBytes { get; } public ChatInputAudioFormat? 
InputAudioFormat { get; } public ChatMessageContentPartKind Kind { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public string Refusal { get; } @@ -1712,6 +1726,7 @@ public class ChatOutputAudio : IJsonModel, IPersistableModel, IPersistableModel, IPersistableModel { public ChatOutputAudioReference(string id); public string Id { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ChatOutputAudioReference JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -1774,6 +1790,7 @@ public class ChatOutputAudioReference : IJsonModel, IP public override readonly string ToString(); } public class ChatOutputPrediction : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static ChatOutputPrediction CreateStaticContentPrediction(IEnumerable staticContentParts); @@ -1786,6 +1803,7 @@ public class ChatOutputPrediction : IJsonModel, IPersistab public class ChatOutputTokenUsageDetails : IJsonModel, IPersistableModel { public int AcceptedPredictionTokenCount { get; } public int AudioTokenCount { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public int ReasoningTokenCount { get; } @@ -1813,6 +1831,7 @@ public class ChatOutputTokenUsageDetails : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static ChatResponseFormat CreateJsonObjectFormat(); @@ -1848,6 +1867,7 @@ public enum ChatResponseModalities { } public class ChatTokenLogProbabilityDetails : IJsonModel, IPersistableModel { public float LogProbability { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public 
string Token { get; } @@ -1860,6 +1880,7 @@ public class ChatTokenLogProbabilityDetails : IJsonModel, IPersistableModel { public float LogProbability { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public string Token { get; } @@ -1874,6 +1895,7 @@ public class ChatTokenUsage : IJsonModel, IPersistableModel, IPersistableModel { public BinaryData FunctionParameters { get; } public bool? FunctionSchemaIsStrict { get; } public ChatToolKind Kind { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static ChatTool CreateFunctionTool(string functionName, string functionDescription = null, BinaryData functionParameters = null, bool? functionSchemaIsStrict = null); @@ -1901,6 +1924,7 @@ public class ChatToolCall : IJsonModel, IPersistableModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static ChatToolChoice CreateAutoChoice(); @@ -1928,6 +1953,7 @@ public enum ChatToolKind { Function = 0 } public class ChatWebSearchOptions : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ChatWebSearchOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -1990,6 +2016,7 @@ public class StreamingChatCompletionUpdate : IJsonModel RefusalTokenLogProbabilities { get; } @@ -2008,6 +2035,7 @@ public class StreamingChatCompletionUpdate : IJsonModel, IPersistableModel { public BinaryData FunctionArgumentsUpdate { get; } public string FunctionName { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual StreamingChatFunctionCallUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -2019,6 
+2047,7 @@ public class StreamingChatOutputAudioUpdate : IJsonModel, IPersistableModel { public int? Dimensions { get; set; } public string EndUserId { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual EmbeddingGenerationOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -2317,6 +2348,7 @@ public class EmbeddingGenerationOptions : IJsonModel } public class EmbeddingTokenUsage : IJsonModel, IPersistableModel { public int InputTokenCount { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public int TotalTokenCount { get; } @@ -2327,6 +2359,7 @@ public class EmbeddingTokenUsage : IJsonModel, IPersistable } public class OpenAIEmbedding : IJsonModel, IPersistableModel { public int Index { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual OpenAIEmbedding JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -2337,6 +2370,7 @@ public class OpenAIEmbedding : IJsonModel, IPersistableModel, IJsonModel, IPersistableModel { public string Model { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public EmbeddingTokenUsage Usage { get; } @@ -4220,6 +4254,7 @@ public class CodeInterpreterCallLogsOutput : CodeInterpreterCallOutput, IJsonMod protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class CodeInterpreterCallOutput : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual CodeInterpreterCallOutput JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -4258,6 +4293,7 @@ public class CodeInterpreterToolContainer : 
IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static AutomaticCodeInterpreterToolContainerConfiguration CreateAutomaticContainerConfiguration(IEnumerable fileIds = null); @@ -4282,6 +4319,7 @@ public class ComputerCallAction : IJsonModel, IPersistableMo public IList KeyPressKeyCodes { get; } public ComputerCallActionKind Kind { get; } public Drawing.Point? MoveCoordinates { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public Drawing.Point? ScrollCoordinates { get; } @@ -4321,6 +4359,7 @@ public enum ComputerCallActionMouseButton { Forward = 4 } public class ComputerCallOutput : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static ComputerCallOutput CreateScreenshotOutput(BinaryData screenshotImageBytes, string screenshotImageBytesMediaType); @@ -4363,6 +4402,7 @@ public class ComputerCallSafetyCheck : IJsonModel, IPer public string Code { get; set; } public string Id { get; set; } public string Message { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ComputerCallSafetyCheck JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -4446,6 +4486,7 @@ public class CreateResponseOptions : IJsonModel, IPersist protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class CustomMcpToolCallApprovalPolicy : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public McpToolFilter ToolsAlwaysRequiringApproval { get; set; } @@ -4488,6 +4529,7 @@ public class FileSearchCallResult : IJsonModel, IPersistab public IDictionary Attributes { get; } public 
string FileId { get; set; } public string Filename { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public float? Score { get; set; } @@ -4531,6 +4573,7 @@ public class FileSearchTool : ResponseTool, IJsonModel, IPersist public override readonly string ToString(); } public class FileSearchToolRankingOptions : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public FileSearchToolRanker? Ranker { get; set; } @@ -4680,6 +4723,7 @@ public class ImageGenerationToolInputImageMask : IJsonModel, public McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy globalPolicy); public CustomMcpToolCallApprovalPolicy CustomPolicy { get; } public GlobalMcpToolCallApprovalPolicy? GlobalPolicy { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual McpToolCallApprovalPolicy JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -4844,6 +4889,7 @@ public class McpToolDefinition : IJsonModel, IPersistableMode public string Description { get; set; } public BinaryData InputSchema { get; set; } public string Name { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual McpToolDefinition JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -4863,6 +4909,7 @@ public class McpToolDefinitionListItem : ResponseItem, IJsonModel, IPersistableModel { public bool? 
IsReadOnly { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public IList ToolNames { get; } @@ -4990,6 +5037,7 @@ public enum ReasoningStatus { Incomplete = 2 } public class ReasoningSummaryPart : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static ReasoningSummaryTextPart CreateTextPart(string text); @@ -5022,6 +5070,7 @@ public class ResponseContentPart : IJsonModel, IPersistable public string InputImageFileId { get; } public ResponseContentPartKind Kind { get; } public IReadOnlyList OutputTextAnnotations { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public string Refusal { get; } @@ -5050,6 +5099,7 @@ public enum ResponseContentPartKind { public class ResponseDeletionResult : IJsonModel, IPersistableModel { public bool Deleted { get; } public string Id { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ResponseDeletionResult JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -5061,6 +5111,7 @@ public class ResponseDeletionResult : IJsonModel, IPersi public class ResponseError : IJsonModel, IPersistableModel { public ResponseErrorCode Code { get; } public string Message { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ResponseError JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -5116,6 +5167,7 @@ public class ResponseError : IJsonModel, IPersistableModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public ResponseIncompleteStatusReason? 
Reason { get; } @@ -5141,6 +5193,7 @@ public class ResponseIncompleteStatusDetails : IJsonModel, IPersistableModel { public int CachedTokenCount { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ResponseInputTokenUsageDetails JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -5150,6 +5203,7 @@ public class ResponseInputTokenUsageDetails : IJsonModel, IPersistableModel { public string Id { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static MessageResponseItem CreateAssistantMessageItem(IEnumerable contentParts); @@ -5198,6 +5252,7 @@ public class ResponseItemCollectionOptions : IJsonModel, IPersistableModel { public ResponseMessageAnnotationKind Kind { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual ResponseMessageAnnotation JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -5236,6 +5292,7 @@ public enum ResponseMessageAnnotationKind { ContainerFileCitation = 3 } public class ResponseOutputTokenUsageDetails : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public int ReasoningTokenCount { get; } @@ -5262,6 +5319,7 @@ public class ResponseOutputTokenUsageDetails : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public ResponseReasoningEffortLevel? 
ReasoningEffortLevel { get; set; } @@ -5388,6 +5446,7 @@ public enum ResponseStatus { } public class ResponseTextFormat : IJsonModel, IPersistableModel { public ResponseTextFormatKind Kind { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static ResponseTextFormat CreateJsonObjectFormat(); @@ -5405,6 +5464,7 @@ public enum ResponseTextFormatKind { JsonSchema = 3 } public class ResponseTextOptions : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public ResponseTextFormat TextFormat { get; set; } @@ -5418,6 +5478,7 @@ public class ResponseTokenUsage : IJsonModel, IPersistableMo public ResponseInputTokenUsageDetails InputTokenDetails { get; } public int OutputTokenCount { get; } public ResponseOutputTokenUsageDetails OutputTokenDetails { get; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public int TotalTokenCount { get; } @@ -5427,6 +5488,7 @@ public class ResponseTokenUsage : IJsonModel, IPersistableMo protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class ResponseTool : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static CodeInterpreterTool CreateCodeInterpreterTool(CodeInterpreterToolContainer container); @@ -5862,6 +5924,7 @@ public class StreamingResponseTextAnnotationAddedUpdate : StreamingResponseUpdat protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class StreamingResponseUpdate : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public int SequenceNumber { get; } @@ -5967,6 +6030,7 @@ public class 
WebSearchToolApproximateLocation : WebSearchToolLocation, IJsonMode } public class WebSearchToolFilters : IJsonModel, IPersistableModel { public IList AllowedDomains { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } protected virtual WebSearchToolFilters JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); @@ -5975,6 +6039,7 @@ public class WebSearchToolFilters : IJsonModel, IPersistab protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class WebSearchToolLocation : IJsonModel, IPersistableModel { + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public static WebSearchToolApproximateLocation CreateApproximateLocation(string country = null, string region = null, string city = null, string timezone = null); diff --git a/src/Custom/OpenAIClient.cs b/src/Custom/OpenAIClient.cs index 90472bbf9..c1f2b76b3 100644 --- a/src/Custom/OpenAIClient.cs +++ b/src/Custom/OpenAIClient.cs @@ -341,7 +341,7 @@ protected internal OpenAIClient(ClientPipeline pipeline, OpenAIClientOptions opt /// /// A new . 
[Experimental("OPENAI001")] - public virtual ResponsesClient GetOpenAIResponseClient(string model) => new(Pipeline, model, _options); + public virtual ResponsesClient GetResponsesClient(string model) => new(Pipeline, model, _options); /// /// Gets a new instance of that reuses the client configuration details provided to From 5b20b5a1c16af807f06d11aed4d41cf917e4fd35 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Wed, 19 Nov 2025 17:16:35 -0600 Subject: [PATCH 13/15] fb --- api/OpenAI.net8.0.cs | 104 +- api/OpenAI.netstandard2.0.cs | 101 +- .../Responses/Example01_SimpleResponse.cs | 4 +- .../Example01_SimpleResponseAsync.cs | 4 +- .../Example02_SimpleResponseStreaming.cs | 4 +- .../Example02_SimpleResponseStreamingAsync.cs | 4 +- .../Responses/Example03_FunctionCalling.cs | 4 +- .../Example03_FunctionCallingAsync.cs | 4 +- .../Example04_FunctionCallingStreaming.cs | 4 +- ...Example04_FunctionCallingStreamingAsync.cs | 4 +- examples/Responses/Example05_RemoteMcp.cs | 4 +- .../Responses/Example05_RemoteMcpAsync.cs | 4 +- .../Example06_RemoteMcpAuthentication.cs | 4 +- .../Example06_RemoteMcpAuthenticationAsync.cs | 4 +- .../Example07_InputAdditionalProperties.cs | 4 +- ...xample07_InputAdditionalPropertiesAsync.cs | 4 +- .../Example08_OutputAdditionalProperties.cs | 4 +- ...ample08_OutputAdditionalPropertiesAsync.cs | 4 +- .../Example09_ModelOverridePerRequest.cs | 4 +- .../Example09_ModelOverridePerRequestAsync.cs | 4 +- .../Responses/Example10_CodeInterpreter.cs | 4 +- .../Example10_CodeInterpreterAsync.cs | 4 +- .../client/models/responses.models.tsp | 5 + specification/client/responses.client.tsp | 1 + src/Custom/OpenAIClient.cs | 2 +- .../CreateResponseOptions.Serialization.cs | 1170 ++--- src/Custom/Responses/CreateResponseOptions.cs | 161 +- src/Custom/Responses/ModelIdResponses.cs | 2 +- ...> ResponseItemCollection.Serialization.cs} | 0 ...eItemList.cs => ResponseItemCollection.cs} | 1 + src/Custom/Responses/ResponseResult.cs | 2 +- 
src/Custom/Responses/ResponsesClient.cs | 92 +- src/Generated/Models/OpenAIContext.cs | 1 + .../CreateResponseOptions.Serialization.cs | 615 +++ .../Models/Responses/CreateResponseOptions.cs | 82 + .../Models/Responses/ModelIdsResponses.cs | 126 +- src/Generated/OpenAIClient.cs | 8 - src/Generated/OpenAIModelFactory.cs | 31 + tests/Responses/ResponseStoreTests.cs | 16 +- tests/Responses/ResponsesTests.cs | 86 +- tests/Responses/ResponsesToolTests.cs | 40 +- tests/Utility/TestHelpers.cs | 2 +- tspCodeModel.json | 4404 +++++++++-------- 43 files changed, 4037 insertions(+), 3095 deletions(-) rename src/Custom/Responses/{ResponseItemList.Serialization.cs => ResponseItemCollection.Serialization.cs} (100%) rename src/Custom/Responses/{ResponseItemList.cs => ResponseItemCollection.cs} (97%) create mode 100644 src/Generated/Models/Responses/CreateResponseOptions.Serialization.cs create mode 100644 src/Generated/Models/Responses/CreateResponseOptions.cs diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs index 5dbaa5c0e..4fca2fda2 100644 --- a/api/OpenAI.net8.0.cs +++ b/api/OpenAI.net8.0.cs @@ -38,8 +38,6 @@ public class OpenAIClient { [Experimental("OPENAI001")] public virtual ResponsesClient GetResponsesClient(); [Experimental("OPENAI001")] - public virtual ResponsesClient GetResponsesClient(string model); - [Experimental("OPENAI001")] public virtual VectorStoreClient GetVectorStoreClient(); [Experimental("OPENAI001")] public virtual VideoClient GetVideoClient(); @@ -5096,10 +5094,9 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, } [Experimental("OPENAI001")] public class CreateResponseOptions : IJsonModel, IPersistableModel { - public CreateResponseOptions(List input); - public string EndUserId { get; set; } - public IList Include { get; set; } - public IList Input { get; } + public CreateResponseOptions(IEnumerable inputItems, string model); + public IList IncludedProperties { get; } + public IList InputItems { get; } public 
string Instructions { get; set; } public bool? IsBackgroundModeEnabled { get; set; } public bool? IsParallelToolCallsEnabled { get; set; } @@ -5107,7 +5104,8 @@ public class CreateResponseOptions : IJsonModel, IPersist public bool? IsStreamingEnabled { get; set; } public int? MaxOutputTokenCount { get; set; } public IDictionary Metadata { get; } - public ModelIdsResponses? Model { get; set; } + public string Model { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] public ref JsonPatch Patch { get; } @@ -5120,6 +5118,7 @@ public class CreateResponseOptions : IJsonModel, IPersist public IList Tools { get; } public float? TopP { get; set; } public ResponseTruncationMode? TruncationMode { get; set; } + public string User { get; set; } protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions); @@ -5626,81 +5625,6 @@ public enum MessageStatus { Incomplete = 2 } [Experimental("OPENAI001")] - public readonly partial struct ModelIdsResponses : IEquatable { - public ModelIdsResponses(string value); - public static ModelIdsResponses Chatgpt4oLatest { get; } - public static ModelIdsResponses CodexMiniLatest { get; } - public static ModelIdsResponses ComputerUsePreview { get; } - public static ModelIdsResponses ComputerUsePreview20250311 { get; } - public static ModelIdsResponses Gpt35Turbo { get; } - public static ModelIdsResponses Gpt35Turbo0125 { get; } - public static ModelIdsResponses Gpt35Turbo0301 { get; } - public static ModelIdsResponses Gpt35Turbo0613 { get; } - public static ModelIdsResponses Gpt35Turbo1106 { get; } - public static ModelIdsResponses Gpt35Turbo16k { get; } - public static ModelIdsResponses Gpt35Turbo16k0613 { get; } - public static 
ModelIdsResponses Gpt4 { get; } - public static ModelIdsResponses Gpt40125Preview { get; } - public static ModelIdsResponses Gpt40314 { get; } - public static ModelIdsResponses Gpt40613 { get; } - public static ModelIdsResponses Gpt41 { get; } - public static ModelIdsResponses Gpt41106Preview { get; } - public static ModelIdsResponses Gpt4120250414 { get; } - public static ModelIdsResponses Gpt41Mini { get; } - public static ModelIdsResponses Gpt41Mini20250414 { get; } - public static ModelIdsResponses Gpt41Nano { get; } - public static ModelIdsResponses Gpt41Nano20250414 { get; } - public static ModelIdsResponses Gpt432k { get; } - public static ModelIdsResponses Gpt432k0314 { get; } - public static ModelIdsResponses Gpt432k0613 { get; } - public static ModelIdsResponses Gpt4o { get; } - public static ModelIdsResponses Gpt4o20240513 { get; } - public static ModelIdsResponses Gpt4o20240806 { get; } - public static ModelIdsResponses Gpt4o20241120 { get; } - public static ModelIdsResponses Gpt4oAudioPreview { get; } - public static ModelIdsResponses Gpt4oAudioPreview20241001 { get; } - public static ModelIdsResponses Gpt4oAudioPreview20241217 { get; } - public static ModelIdsResponses Gpt4oAudioPreview20250603 { get; } - public static ModelIdsResponses Gpt4oMini { get; } - public static ModelIdsResponses Gpt4oMini20240718 { get; } - public static ModelIdsResponses Gpt4oMiniAudioPreview { get; } - public static ModelIdsResponses Gpt4oMiniAudioPreview20241217 { get; } - public static ModelIdsResponses Gpt4oMiniSearchPreview { get; } - public static ModelIdsResponses Gpt4oMiniSearchPreview20250311 { get; } - public static ModelIdsResponses Gpt4oSearchPreview { get; } - public static ModelIdsResponses Gpt4oSearchPreview20250311 { get; } - public static ModelIdsResponses Gpt4Turbo { get; } - public static ModelIdsResponses Gpt4Turbo20240409 { get; } - public static ModelIdsResponses Gpt4TurboPreview { get; } - public static ModelIdsResponses Gpt4VisionPreview { get; } - 
public static ModelIdsResponses O1 { get; } - public static ModelIdsResponses O120241217 { get; } - public static ModelIdsResponses O1Mini { get; } - public static ModelIdsResponses O1Mini20240912 { get; } - public static ModelIdsResponses O1Preview { get; } - public static ModelIdsResponses O1Preview20240912 { get; } - public static ModelIdsResponses O1Pro { get; } - public static ModelIdsResponses O1Pro20250319 { get; } - public static ModelIdsResponses O3 { get; } - public static ModelIdsResponses O320250416 { get; } - public static ModelIdsResponses O3Mini { get; } - public static ModelIdsResponses O3Mini20250131 { get; } - public static ModelIdsResponses O3Pro { get; } - public static ModelIdsResponses O3Pro20250610 { get; } - public static ModelIdsResponses O4Mini { get; } - public static ModelIdsResponses O4Mini20250416 { get; } - public readonly bool Equals(ModelIdsResponses other); - [EditorBrowsable(EditorBrowsableState.Never)] - public override readonly bool Equals(object obj); - [EditorBrowsable(EditorBrowsableState.Never)] - public override readonly int GetHashCode(); - public static bool operator ==(ModelIdsResponses left, ModelIdsResponses right); - public static implicit operator ModelIdsResponses(string value); - public static implicit operator ModelIdsResponses?(string value); - public static bool operator !=(ModelIdsResponses left, ModelIdsResponses right); - public override readonly string ToString(); - } - [Experimental("OPENAI001")] public static class OpenAIResponsesModelFactory { public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null); public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? 
status = null, IEnumerable summaryParts = null); @@ -5951,6 +5875,7 @@ public class ResponseItemCollection : IJsonModel, IPersi public string FirstId { get; } public bool HasMore { get; } public string LastId { get; } + [EditorBrowsable(EditorBrowsableState.Never)] public string Object { get; } [EditorBrowsable(EditorBrowsableState.Never)] [Experimental("SCME0001")] @@ -6079,7 +6004,6 @@ public class ResponseResult : IJsonModel, IPersistableModel, IPersistableModel CancelResponse(string responseId, CancellationToken cancellationToken = default); diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs index e048d2b42..37363cd6e 100644 --- a/api/OpenAI.netstandard2.0.cs +++ b/api/OpenAI.netstandard2.0.cs @@ -25,7 +25,6 @@ public class OpenAIClient { public virtual OpenAIModelClient GetOpenAIModelClient(); public virtual RealtimeClient GetRealtimeClient(); public virtual ResponsesClient GetResponsesClient(); - public virtual ResponsesClient GetResponsesClient(string model); public virtual VectorStoreClient GetVectorStoreClient(); public virtual VideoClient GetVideoClient(); } @@ -4456,10 +4455,9 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } public class CreateResponseOptions : IJsonModel, IPersistableModel { - public CreateResponseOptions(List input); - public string EndUserId { get; set; } - public IList Include { get; set; } - public IList Input { get; } + public CreateResponseOptions(IEnumerable inputItems, string model); + public IList IncludedProperties { get; } + public IList InputItems { get; } public string Instructions { get; set; } public bool? IsBackgroundModeEnabled { get; set; } public bool? IsParallelToolCallsEnabled { get; set; } @@ -4467,7 +4465,8 @@ public class CreateResponseOptions : IJsonModel, IPersist public bool? IsStreamingEnabled { get; set; } public int? 
MaxOutputTokenCount { get; set; } public IDictionary Metadata { get; } - public ModelIdsResponses? Model { get; set; } + public string Model { get; set; } + [Serialization.JsonIgnore] [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } public string PreviousResponseId { get; set; } @@ -4479,6 +4478,7 @@ public class CreateResponseOptions : IJsonModel, IPersist public IList Tools { get; } public float? TopP { get; set; } public ResponseTruncationMode? TruncationMode { get; set; } + public string User { get; set; } protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options); protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options); public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions); @@ -4939,80 +4939,6 @@ public enum MessageStatus { Completed = 1, Incomplete = 2 } - public readonly partial struct ModelIdsResponses : IEquatable { - public ModelIdsResponses(string value); - public static ModelIdsResponses Chatgpt4oLatest { get; } - public static ModelIdsResponses CodexMiniLatest { get; } - public static ModelIdsResponses ComputerUsePreview { get; } - public static ModelIdsResponses ComputerUsePreview20250311 { get; } - public static ModelIdsResponses Gpt35Turbo { get; } - public static ModelIdsResponses Gpt35Turbo0125 { get; } - public static ModelIdsResponses Gpt35Turbo0301 { get; } - public static ModelIdsResponses Gpt35Turbo0613 { get; } - public static ModelIdsResponses Gpt35Turbo1106 { get; } - public static ModelIdsResponses Gpt35Turbo16k { get; } - public static ModelIdsResponses Gpt35Turbo16k0613 { get; } - public static ModelIdsResponses Gpt4 { get; } - public static ModelIdsResponses Gpt40125Preview { get; } - public static ModelIdsResponses Gpt40314 { get; } - public static ModelIdsResponses Gpt40613 { get; } - public static ModelIdsResponses Gpt41 { get; } - public static 
ModelIdsResponses Gpt41106Preview { get; } - public static ModelIdsResponses Gpt4120250414 { get; } - public static ModelIdsResponses Gpt41Mini { get; } - public static ModelIdsResponses Gpt41Mini20250414 { get; } - public static ModelIdsResponses Gpt41Nano { get; } - public static ModelIdsResponses Gpt41Nano20250414 { get; } - public static ModelIdsResponses Gpt432k { get; } - public static ModelIdsResponses Gpt432k0314 { get; } - public static ModelIdsResponses Gpt432k0613 { get; } - public static ModelIdsResponses Gpt4o { get; } - public static ModelIdsResponses Gpt4o20240513 { get; } - public static ModelIdsResponses Gpt4o20240806 { get; } - public static ModelIdsResponses Gpt4o20241120 { get; } - public static ModelIdsResponses Gpt4oAudioPreview { get; } - public static ModelIdsResponses Gpt4oAudioPreview20241001 { get; } - public static ModelIdsResponses Gpt4oAudioPreview20241217 { get; } - public static ModelIdsResponses Gpt4oAudioPreview20250603 { get; } - public static ModelIdsResponses Gpt4oMini { get; } - public static ModelIdsResponses Gpt4oMini20240718 { get; } - public static ModelIdsResponses Gpt4oMiniAudioPreview { get; } - public static ModelIdsResponses Gpt4oMiniAudioPreview20241217 { get; } - public static ModelIdsResponses Gpt4oMiniSearchPreview { get; } - public static ModelIdsResponses Gpt4oMiniSearchPreview20250311 { get; } - public static ModelIdsResponses Gpt4oSearchPreview { get; } - public static ModelIdsResponses Gpt4oSearchPreview20250311 { get; } - public static ModelIdsResponses Gpt4Turbo { get; } - public static ModelIdsResponses Gpt4Turbo20240409 { get; } - public static ModelIdsResponses Gpt4TurboPreview { get; } - public static ModelIdsResponses Gpt4VisionPreview { get; } - public static ModelIdsResponses O1 { get; } - public static ModelIdsResponses O120241217 { get; } - public static ModelIdsResponses O1Mini { get; } - public static ModelIdsResponses O1Mini20240912 { get; } - public static ModelIdsResponses O1Preview { get; } - 
public static ModelIdsResponses O1Preview20240912 { get; } - public static ModelIdsResponses O1Pro { get; } - public static ModelIdsResponses O1Pro20250319 { get; } - public static ModelIdsResponses O3 { get; } - public static ModelIdsResponses O320250416 { get; } - public static ModelIdsResponses O3Mini { get; } - public static ModelIdsResponses O3Mini20250131 { get; } - public static ModelIdsResponses O3Pro { get; } - public static ModelIdsResponses O3Pro20250610 { get; } - public static ModelIdsResponses O4Mini { get; } - public static ModelIdsResponses O4Mini20250416 { get; } - public readonly bool Equals(ModelIdsResponses other); - [EditorBrowsable(EditorBrowsableState.Never)] - public override readonly bool Equals(object obj); - [EditorBrowsable(EditorBrowsableState.Never)] - public override readonly int GetHashCode(); - public static bool operator ==(ModelIdsResponses left, ModelIdsResponses right); - public static implicit operator ModelIdsResponses(string value); - public static implicit operator ModelIdsResponses?(string value); - public static bool operator !=(ModelIdsResponses left, ModelIdsResponses right); - public override readonly string ToString(); - } public static class OpenAIResponsesModelFactory { public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null); public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? 
status = null, IEnumerable summaryParts = null); @@ -5238,6 +5164,7 @@ public class ResponseItemCollection : IJsonModel, IPersi public string FirstId { get; } public bool HasMore { get; } public string LastId { get; } + [EditorBrowsable(EditorBrowsableState.Never)] public string Object { get; } [EditorBrowsable(EditorBrowsableState.Never)] public ref JsonPatch Patch { get; } @@ -5352,7 +5279,6 @@ public class ResponseResult : IJsonModel, IPersistableModel, IPersistableModel CancelResponse(string responseId, CancellationToken cancellationToken = default); diff --git a/examples/Responses/Example01_SimpleResponse.cs b/examples/Responses/Example01_SimpleResponse.cs index c8bdc733c..034293ea3 100644 --- a/examples/Responses/Example01_SimpleResponse.cs +++ b/examples/Responses/Example01_SimpleResponse.cs @@ -13,9 +13,9 @@ public partial class ResponseExamples [Test] public void Example01_SimpleResponse() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - ResponseResult response = client.CreateResponse(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); + ResponseResult response = client.CreateResponse(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")], "gpt-5")); Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); } diff --git a/examples/Responses/Example01_SimpleResponseAsync.cs b/examples/Responses/Example01_SimpleResponseAsync.cs index 3331be231..928196850 100644 --- a/examples/Responses/Example01_SimpleResponseAsync.cs +++ b/examples/Responses/Example01_SimpleResponseAsync.cs @@ -14,9 +14,9 @@ public partial class ResponseExamples [Test] public async Task Example01_SimpleResponseAsync() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: 
Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - ResponseResult response = await client.CreateResponseAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); + ResponseResult response = await client.CreateResponseAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")], "gpt-5")); Console.WriteLine($"[ASSISTANT]: {response.GetOutputText()}"); } diff --git a/examples/Responses/Example02_SimpleResponseStreaming.cs b/examples/Responses/Example02_SimpleResponseStreaming.cs index 83b1306cd..29626f1a0 100644 --- a/examples/Responses/Example02_SimpleResponseStreaming.cs +++ b/examples/Responses/Example02_SimpleResponseStreaming.cs @@ -14,9 +14,9 @@ public partial class ResponseExamples [Test] public void Example02_SimpleResponseStreaming() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - CollectionResult responseUpdates = client.CreateResponseStreaming(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); + CollectionResult responseUpdates = client.CreateResponseStreaming(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")], "gpt-5")); Console.Write($"[ASSISTANT]: "); foreach (StreamingResponseUpdate update in responseUpdates) diff --git a/examples/Responses/Example02_SimpleResponseStreamingAsync.cs b/examples/Responses/Example02_SimpleResponseStreamingAsync.cs index 7a19f1f62..661cf0535 100644 --- a/examples/Responses/Example02_SimpleResponseStreamingAsync.cs +++ b/examples/Responses/Example02_SimpleResponseStreamingAsync.cs @@ -16,9 +16,9 @@ public partial class ResponseExamples [Test] public async Task Example02_SimpleResponseStreamingAsync() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: 
Environment.GetEnvironmentVariable("OPENAI_API_KEY")); - AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")])); + AsyncCollectionResult responseUpdates = client.CreateResponseStreamingAsync(new ([ResponseItem.CreateUserMessageItem("Say 'this is a test.'")], "gpt-5")); Console.Write($"[ASSISTANT]: "); await foreach (StreamingResponseUpdate update in responseUpdates) diff --git a/examples/Responses/Example03_FunctionCalling.cs b/examples/Responses/Example03_FunctionCalling.cs index af18b6314..6efcf3462 100644 --- a/examples/Responses/Example03_FunctionCalling.cs +++ b/examples/Responses/Example03_FunctionCalling.cs @@ -62,14 +62,14 @@ private static string GetCurrentWeather(string location, string unit = "celsius" [Test] public void Example03_FunctionCalling() { - ResponsesClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"), ]; - CreateResponseOptions options = new(inputItems) + CreateResponseOptions options = new(inputItems, "gpt-5") { Tools = { getCurrentLocationTool, getCurrentWeatherTool }, }; diff --git a/examples/Responses/Example03_FunctionCallingAsync.cs b/examples/Responses/Example03_FunctionCallingAsync.cs index 1610746a3..1dbbf32cc 100644 --- a/examples/Responses/Example03_FunctionCallingAsync.cs +++ b/examples/Responses/Example03_FunctionCallingAsync.cs @@ -19,14 +19,14 @@ public partial class ResponseExamples [Test] public async Task Example03_FunctionCallingAsync() { - ResponsesClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ ResponseItem.CreateUserMessageItem("What's the weather 
like today for my current location?"), ]; - CreateResponseOptions options = new(inputItems) + CreateResponseOptions options = new(inputItems, "gpt-5") { Tools = { getCurrentLocationTool, getCurrentWeatherTool }, }; diff --git a/examples/Responses/Example04_FunctionCallingStreaming.cs b/examples/Responses/Example04_FunctionCallingStreaming.cs index caad5d601..fe19bbe78 100644 --- a/examples/Responses/Example04_FunctionCallingStreaming.cs +++ b/examples/Responses/Example04_FunctionCallingStreaming.cs @@ -19,14 +19,14 @@ public partial class ResponseExamples [Test] public void Example04_FunctionCallingStreaming() { - ResponsesClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"), ]; - CreateResponseOptions options = new(inputItems) + CreateResponseOptions options = new(inputItems, "gpt-5") { Tools = { getCurrentLocationTool, getCurrentWeatherTool }, }; diff --git a/examples/Responses/Example04_FunctionCallingStreamingAsync.cs b/examples/Responses/Example04_FunctionCallingStreamingAsync.cs index e0db98188..06155044e 100644 --- a/examples/Responses/Example04_FunctionCallingStreamingAsync.cs +++ b/examples/Responses/Example04_FunctionCallingStreamingAsync.cs @@ -20,14 +20,14 @@ public partial class ResponseExamples [Test] public async Task Example04_FunctionCallingStreamingAsync() { - ResponsesClient client = new("gpt-5", Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); List inputItems = [ ResponseItem.CreateUserMessageItem("What's the weather like today for my current location?"), ]; - CreateResponseOptions options = new(inputItems) + CreateResponseOptions options = new(inputItems, "gpt-5") { Tools = { getCurrentLocationTool, 
getCurrentWeatherTool }, }; diff --git a/examples/Responses/Example05_RemoteMcp.cs b/examples/Responses/Example05_RemoteMcp.cs index bd6a1430a..dd73759c7 100644 --- a/examples/Responses/Example05_RemoteMcp.cs +++ b/examples/Responses/Example05_RemoteMcp.cs @@ -15,7 +15,7 @@ public void Example05_RemoteMcp() { CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Roll 2d4+1") - ]) + ], "gpt-5") { Tools = { new McpTool(serverLabel: "dmcp", serverUri: new Uri("https://dmcp-server.deno.dev/sse")) @@ -26,7 +26,7 @@ public void Example05_RemoteMcp() } }; - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = client.CreateResponse(options); diff --git a/examples/Responses/Example05_RemoteMcpAsync.cs b/examples/Responses/Example05_RemoteMcpAsync.cs index 0d05d61c1..3c9086e0c 100644 --- a/examples/Responses/Example05_RemoteMcpAsync.cs +++ b/examples/Responses/Example05_RemoteMcpAsync.cs @@ -16,7 +16,7 @@ public async Task Example05_RemoteMcpAsync() { CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Roll 2d4+1") - ]) + ], "gpt-5") { Tools = { new McpTool(serverLabel: "dmcp", serverUri: new Uri("https://dmcp-server.deno.dev/sse")) @@ -27,7 +27,7 @@ public async Task Example05_RemoteMcpAsync() } }; - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = await client.CreateResponseAsync(options); diff --git a/examples/Responses/Example06_RemoteMcpAuthentication.cs b/examples/Responses/Example06_RemoteMcpAuthentication.cs index b38d506bc..e4785cb39 100644 --- a/examples/Responses/Example06_RemoteMcpAuthentication.cs +++ b/examples/Responses/Example06_RemoteMcpAuthentication.cs @@ -14,7 +14,7 
@@ public partial class ResponseExamples public void Example06_RemoteMcpAuthentication() { CreateResponseOptions options = new([ - ResponseItem.CreateUserMessageItem("Create a payment link for $20")]) + ResponseItem.CreateUserMessageItem("Create a payment link for $20")], "gpt-5") { Tools = { new McpTool(serverLabel: "stripe", serverUri: new Uri("https://mcp.stripe.com")) @@ -24,7 +24,7 @@ public void Example06_RemoteMcpAuthentication() } }; - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = client.CreateResponse(options); diff --git a/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs b/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs index 66c5a533b..b46d172f0 100644 --- a/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs +++ b/examples/Responses/Example06_RemoteMcpAuthenticationAsync.cs @@ -16,7 +16,7 @@ public async Task Example06_RemoteMcpAuthenticationAsync() { CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Create a payment link for $20") - ]) + ], "gpt-5") { Tools = { new McpTool(serverLabel: "stripe", serverUri: new Uri("https://mcp.stripe.com")) @@ -26,7 +26,7 @@ public async Task Example06_RemoteMcpAuthenticationAsync() } }; - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); ResponseResult response = await client.CreateResponseAsync(options); diff --git a/examples/Responses/Example07_InputAdditionalProperties.cs b/examples/Responses/Example07_InputAdditionalProperties.cs index f2b494733..c63074e02 100644 --- a/examples/Responses/Example07_InputAdditionalProperties.cs +++ b/examples/Responses/Example07_InputAdditionalProperties.cs @@ -14,14 +14,14 @@ public partial class 
ResponseExamples [Test] public void Example07_InputAdditionalProperties() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on CreateResponseOptions in the request payload. // See the API docs https://platform.openai.com/docs/api-reference/responses/create for supported additional fields. CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("What is the answer to the ultimate question of life, the universe, and everything?") - ]); + ], "gpt-5"); options.Patch.Set("$.reasoning.effort"u8, "high"); options.Patch.Set("$.text.verbosity"u8, "medium"); diff --git a/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs b/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs index 135f8b2ff..409b933ba 100644 --- a/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs +++ b/examples/Responses/Example07_InputAdditionalPropertiesAsync.cs @@ -15,14 +15,14 @@ public partial class ResponseExamples [Test] public async Task Example07_InputAdditionalPropertiesAsync() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `reasoning.effort` and `text.verbosity` that aren’t modeled on CreateResponseOptions in the request payload. // See the API docs https://platform.openai.com/docs/api-reference/responses/create for supported additional fields. 
CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("What is the answer to the ultimate question of life, the universe, and everything?") - ]); + ], "gpt-5"); options.Patch.Set("$.reasoning.effort"u8, "high"); options.Patch.Set("$.text.verbosity"u8, "medium"); diff --git a/examples/Responses/Example08_OutputAdditionalProperties.cs b/examples/Responses/Example08_OutputAdditionalProperties.cs index 5aa39003b..d7074ec31 100644 --- a/examples/Responses/Example08_OutputAdditionalProperties.cs +++ b/examples/Responses/Example08_OutputAdditionalProperties.cs @@ -15,11 +15,11 @@ public partial class ResponseExamples [Test] public void Example08_OutputAdditionalProperties() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf") - ]) + ], "gpt-5") { Tools = { diff --git a/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs b/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs index 3508ecf4f..0dd60ff17 100644 --- a/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs +++ b/examples/Responses/Example08_OutputAdditionalPropertiesAsync.cs @@ -16,11 +16,11 @@ public partial class ResponseExamples [Test] public async Task Example08_OutputAdditionalPropertiesAsync() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new( apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf") - ]) + ], "gpt-5") { Tools = { diff --git a/examples/Responses/Example09_ModelOverridePerRequest.cs 
b/examples/Responses/Example09_ModelOverridePerRequest.cs index 9c401b1c2..5d469232d 100644 --- a/examples/Responses/Example09_ModelOverridePerRequest.cs +++ b/examples/Responses/Example09_ModelOverridePerRequest.cs @@ -14,7 +14,7 @@ public partial class ResponseExamples [Test] public void Example09_ModelOverridePerRequest() { - ResponsesClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `model` that aren't exposed on CreateResponseOptions. @@ -22,7 +22,7 @@ public void Example09_ModelOverridePerRequest() // See the API docs https://platform.openai.com/docs/api-reference/responses/create for supported additional fields. CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Say 'this is a test.") - ]); + ], "gpt-4o"); // Model can also be set via constructor options.Patch.Set("$.model"u8, "gpt-5"); ResponseResult response = client.CreateResponse(options); diff --git a/examples/Responses/Example09_ModelOverridePerRequestAsync.cs b/examples/Responses/Example09_ModelOverridePerRequestAsync.cs index b3155f1ab..f0ac3bce3 100644 --- a/examples/Responses/Example09_ModelOverridePerRequestAsync.cs +++ b/examples/Responses/Example09_ModelOverridePerRequestAsync.cs @@ -15,7 +15,7 @@ public partial class ResponseExamples [Test] public async Task Example09_ModelOverridePerRequestAsync() { - ResponsesClient client = new(model: "gpt-4o", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); // Add extra request fields using Patch. // Patch lets you set fields like `model` that aren't exposed on CreateResponseOptions. 
@@ -23,7 +23,7 @@ public async Task Example09_ModelOverridePerRequestAsync() // See the API docs https://platform.openai.com/docs/api-reference/responses/create for supported additional fields. CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Say 'this is a test.") - ]); + ], "gpt-4o"); // Model can also be set via constructor options.Patch.Set("$.model"u8, "gpt-5"); ResponseResult response = await client.CreateResponseAsync(options); diff --git a/examples/Responses/Example10_CodeInterpreter.cs b/examples/Responses/Example10_CodeInterpreter.cs index f127b3bc4..30aa7928d 100644 --- a/examples/Responses/Example10_CodeInterpreter.cs +++ b/examples/Responses/Example10_CodeInterpreter.cs @@ -17,7 +17,7 @@ public partial class ResponseExamples [Test] public void Example10_CodeInterpreter() { - ResponsesClient client = new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CodeInterpreterToolContainer container = new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration()); CodeInterpreterTool codeInterpreterTool = new(container); @@ -25,7 +25,7 @@ public void Example10_CodeInterpreter() [ ResponseItem.CreateUserMessageItem("Create an Excel spreadsheet that contains the mathematical times tables from 1-12 and make it available for download."), ]; - CreateResponseOptions options = new(inputItems) + CreateResponseOptions options = new(inputItems, "gpt-5") { Tools = { codeInterpreterTool } }; diff --git a/examples/Responses/Example10_CodeInterpreterAsync.cs b/examples/Responses/Example10_CodeInterpreterAsync.cs index aad96a9f4..2bad4c840 100644 --- a/examples/Responses/Example10_CodeInterpreterAsync.cs +++ b/examples/Responses/Example10_CodeInterpreterAsync.cs @@ -18,13 +18,13 @@ public partial class ResponseExamples [Test] public async Task Example10_CodeInterpreterAsync() { - ResponsesClient client = 
new(model: "gpt-5", apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); + ResponsesClient client = new(apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY")); CodeInterpreterToolContainer container = new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration()); CodeInterpreterTool codeInterpreterTool = new(container); CreateResponseOptions options = new([ ResponseItem.CreateUserMessageItem("Create an Excel spreadsheet that contains the mathematical times tables from 1-12 and make it available for download."), - ]) + ], "gpt-5") { Tools = { codeInterpreterTool } }; diff --git a/specification/client/models/responses.models.tsp b/specification/client/models/responses.models.tsp index 1bcc58fed..01676c009 100644 --- a/specification/client/models/responses.models.tsp +++ b/specification/client/models/responses.models.tsp @@ -68,3 +68,8 @@ model DotNetCodeInterpreterToolContainer { container_id?: string; container?: CodeInterpreterContainerConfiguration; } + +@usage(Usage.input| Usage.json) +model DotNetCreateResponse { + ...CreateResponse; +} diff --git a/specification/client/responses.client.tsp b/specification/client/responses.client.tsp index 839fa9ef6..1a25eb847 100644 --- a/specification/client/responses.client.tsp +++ b/specification/client/responses.client.tsp @@ -93,6 +93,7 @@ using TypeSpec.HttpClient.CSharp; // models.tsp @@dynamicModel(CreateResponse); +@@dynamicModel(DotNetCreateResponse); @@dynamicModel(Response); @@dynamicModel(ResponseProperties); @@dynamicModel(ItemParam); diff --git a/src/Custom/OpenAIClient.cs b/src/Custom/OpenAIClient.cs index c1f2b76b3..d12e580db 100644 --- a/src/Custom/OpenAIClient.cs +++ b/src/Custom/OpenAIClient.cs @@ -341,7 +341,7 @@ protected internal OpenAIClient(ClientPipeline pipeline, OpenAIClientOptions opt /// /// A new . 
[Experimental("OPENAI001")] - public virtual ResponsesClient GetResponsesClient(string model) => new(Pipeline, model, _options); + public virtual ResponsesClient GetResponsesClient() => new(Pipeline, _options); /// /// Gets a new instance of that reuses the client configuration details provided to diff --git a/src/Custom/Responses/CreateResponseOptions.Serialization.cs b/src/Custom/Responses/CreateResponseOptions.Serialization.cs index 05b5f549b..cfd149e18 100644 --- a/src/Custom/Responses/CreateResponseOptions.Serialization.cs +++ b/src/Custom/Responses/CreateResponseOptions.Serialization.cs @@ -1,604 +1,604 @@ -using System; -using System.ClientModel; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Text; -using System.Text.Json; -using OpenAI.Responses; +// using System; +// using System.ClientModel; +// using System.ClientModel.Primitives; +// using System.Collections.Generic; +// using System.Text; +// using System.Text.Json; +// using OpenAI.Responses; -namespace OpenAI.Responses -{ - public partial class CreateResponseOptions : IJsonModel - { - internal CreateResponseOptions() : this(null, default, default, null, default, null, default, null, default, default, null, null, null, null, default, null, null, default, default, default, default) - { - } +// namespace OpenAI.Responses +// { +// public partial class CreateResponseOptions : IJsonModel +// { +// internal CreateResponseOptions() : this(null, default, default, null, default, null, default, null, default, default, null, null, null, null, default, null, null, default, default, default, default) +// { +// } - void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { -#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- if (Patch.Contains("$"u8)) - { - writer.WriteRawValue(Patch.GetJson("$"u8)); - return; - } -#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) +// { +// #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// if (Patch.Contains("$"u8)) +// { +// writer.WriteRawValue(Patch.GetJson("$"u8)); +// return; +// } +// #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - writer.WriteStartObject(); - JsonModelWriteCore(writer, options); - writer.WriteEndObject(); - } +// writer.WriteStartObject(); +// JsonModelWriteCore(writer, options); +// writer.WriteEndObject(); +// } - protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support writing '{format}' format."); - } -#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - if (Optional.IsCollectionDefined(Metadata) && !Patch.Contains("$.metadata"u8)) - { - writer.WritePropertyName("metadata"u8); - writer.WriteStartObject(); -#if NET8_0_OR_GREATER - global::System.Span buffer = stackalloc byte[256]; -#endif - foreach (var item in Metadata) - { -#if NET8_0_OR_GREATER - int bytesWritten = global::System.Text.Encoding.UTF8.GetBytes(item.Key.AsSpan(), buffer); - bool patchContains = (bytesWritten == 256) ? 
Patch.Contains("$.metadata"u8, global::System.Text.Encoding.UTF8.GetBytes(item.Key)) : Patch.Contains("$.metadata"u8, buffer.Slice(0, bytesWritten)); -#else - bool patchContains = Patch.Contains("$.metadata"u8, Encoding.UTF8.GetBytes(item.Key)); -#endif - if (!patchContains) - { - writer.WritePropertyName(item.Key); - if (item.Value == null) - { - writer.WriteNullValue(); - continue; - } - writer.WriteStringValue(item.Value); - } - } +// protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) +// { +// string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; +// if (format != "J") +// { +// throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support writing '{format}' format."); +// } +// #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// if (Optional.IsCollectionDefined(Metadata) && !Patch.Contains("$.metadata"u8)) +// { +// writer.WritePropertyName("metadata"u8); +// writer.WriteStartObject(); +// #if NET8_0_OR_GREATER +// global::System.Span buffer = stackalloc byte[256]; +// #endif +// foreach (var item in Metadata) +// { +// #if NET8_0_OR_GREATER +// int bytesWritten = global::System.Text.Encoding.UTF8.GetBytes(item.Key.AsSpan(), buffer); +// bool patchContains = (bytesWritten == 256) ? 
Patch.Contains("$.metadata"u8, global::System.Text.Encoding.UTF8.GetBytes(item.Key)) : Patch.Contains("$.metadata"u8, buffer.Slice(0, bytesWritten)); +// #else +// bool patchContains = Patch.Contains("$.metadata"u8, Encoding.UTF8.GetBytes(item.Key)); +// #endif +// if (!patchContains) +// { +// writer.WritePropertyName(item.Key); +// if (item.Value == null) +// { +// writer.WriteNullValue(); +// continue; +// } +// writer.WriteStringValue(item.Value); +// } +// } - Patch.WriteTo(writer, "$.metadata"u8); - writer.WriteEndObject(); - } - if (Optional.IsDefined(Temperature) && !Patch.Contains("$.temperature"u8)) - { - writer.WritePropertyName("temperature"u8); - writer.WriteNumberValue(Temperature.Value); - } - if (Optional.IsDefined(TopP) && !Patch.Contains("$.top_p"u8)) - { - writer.WritePropertyName("top_p"u8); - writer.WriteNumberValue(TopP.Value); - } - if (Optional.IsDefined(EndUserId) && !Patch.Contains("$.user"u8)) - { - writer.WritePropertyName("user"u8); - writer.WriteStringValue(EndUserId); - } - if (Optional.IsDefined(ServiceTier) && !Patch.Contains("$.service_tier"u8)) - { - writer.WritePropertyName("service_tier"u8); - writer.WriteStringValue(ServiceTier.Value.ToString()); - } - if (Optional.IsDefined(PreviousResponseId) && !Patch.Contains("$.previous_response_id"u8)) - { - writer.WritePropertyName("previous_response_id"u8); - writer.WriteStringValue(PreviousResponseId); - } - if (Optional.IsDefined(Model) && !Patch.Contains("$.model"u8)) - { - writer.WritePropertyName("model"u8); - writer.WriteStringValue(Model.Value.ToString()); - } - if (Optional.IsDefined(ReasoningOptions) && !Patch.Contains("$.reasoning"u8)) - { - writer.WritePropertyName("reasoning"u8); - writer.WriteObjectValue(ReasoningOptions, options); - } - if (Optional.IsDefined(IsBackgroundModeEnabled) && !Patch.Contains("$.background"u8)) - { - writer.WritePropertyName("background"u8); - writer.WriteBooleanValue(IsBackgroundModeEnabled.Value); - } - if 
(Optional.IsDefined(MaxOutputTokenCount) && !Patch.Contains("$.max_output_tokens"u8)) - { - writer.WritePropertyName("max_output_tokens"u8); - writer.WriteNumberValue(MaxOutputTokenCount.Value); - } - if (Optional.IsDefined(Instructions) && !Patch.Contains("$.instructions"u8)) - { - writer.WritePropertyName("instructions"u8); - writer.WriteStringValue(Instructions); - } - if (Optional.IsDefined(TextOptions) && !Patch.Contains("$.text"u8)) - { - writer.WritePropertyName("text"u8); - writer.WriteObjectValue(TextOptions, options); - } - if (Patch.Contains("$.tools"u8)) - { - if (!Patch.IsRemoved("$.tools"u8)) - { - writer.WritePropertyName("tools"u8); - writer.WriteRawValue(Patch.GetJson("$.tools"u8)); - } - } - else if (Optional.IsCollectionDefined(Tools)) - { - writer.WritePropertyName("tools"u8); - writer.WriteStartArray(); - for (int i = 0; i < Tools.Count; i++) - { - if (Tools[i].Patch.IsRemoved("$"u8)) - { - continue; - } - writer.WriteObjectValue(Tools[i], options); - } - Patch.WriteTo(writer, "$.tools"u8); - writer.WriteEndArray(); - } - if (Optional.IsDefined(ToolChoice) && !Patch.Contains("$.tool_choice"u8)) - { - writer.WritePropertyName("tool_choice"u8); - writer.WriteObjectValue(ToolChoice, options); - } - if (Optional.IsDefined(TruncationMode) && !Patch.Contains("$.truncation"u8)) - { - writer.WritePropertyName("truncation"u8); - writer.WriteStringValue(TruncationMode.Value.ToString()); - } - if (Patch.Contains("$.input"u8)) - { - if (!Patch.IsRemoved("$.input"u8)) - { - writer.WritePropertyName("input"u8); - writer.WriteRawValue(Patch.GetJson("$.input"u8)); - } - } - else - { - writer.WritePropertyName("input"u8); - writer.WriteStartArray(); - for (int i = 0; i < Input.Count; i++) - { - if (Input[i].Patch.IsRemoved("$"u8)) - { - continue; - } - writer.WriteObjectValue(Input[i], options); - } - Patch.WriteTo(writer, "$.input"u8); - writer.WriteEndArray(); - } - if (Patch.Contains("$.include"u8)) - { - if (!Patch.IsRemoved("$.include"u8)) - { - 
writer.WritePropertyName("include"u8); - writer.WriteRawValue(Patch.GetJson("$.include"u8)); - } - } - else if (Optional.IsCollectionDefined(Include)) - { - writer.WritePropertyName("include"u8); - writer.WriteStartArray(); - for (int i = 0; i < Include.Count; i++) - { - if (Patch.IsRemoved(Encoding.UTF8.GetBytes($"$.include[{i}]"))) - { - continue; - } - writer.WriteStringValue(Include[i].ToSerialString()); - } - Patch.WriteTo(writer, "$.include"u8); - writer.WriteEndArray(); - } - if (Optional.IsDefined(IsParallelToolCallsEnabled) && !Patch.Contains("$.parallel_tool_calls"u8)) - { - writer.WritePropertyName("parallel_tool_calls"u8); - writer.WriteBooleanValue(IsParallelToolCallsEnabled.Value); - } - if (Optional.IsDefined(IsStoredOutputEnabled) && !Patch.Contains("$.store"u8)) - { - writer.WritePropertyName("store"u8); - writer.WriteBooleanValue(IsStoredOutputEnabled.Value); - } - if (Optional.IsDefined(IsStreamingEnabled) && !Patch.Contains("$.stream"u8)) - { - writer.WritePropertyName("stream"u8); - writer.WriteBooleanValue(IsStreamingEnabled.Value); - } +// Patch.WriteTo(writer, "$.metadata"u8); +// writer.WriteEndObject(); +// } +// if (Optional.IsDefined(Temperature) && !Patch.Contains("$.temperature"u8)) +// { +// writer.WritePropertyName("temperature"u8); +// writer.WriteNumberValue(Temperature.Value); +// } +// if (Optional.IsDefined(TopP) && !Patch.Contains("$.top_p"u8)) +// { +// writer.WritePropertyName("top_p"u8); +// writer.WriteNumberValue(TopP.Value); +// } +// if (Optional.IsDefined(EndUserId) && !Patch.Contains("$.user"u8)) +// { +// writer.WritePropertyName("user"u8); +// writer.WriteStringValue(EndUserId); +// } +// if (Optional.IsDefined(ServiceTier) && !Patch.Contains("$.service_tier"u8)) +// { +// writer.WritePropertyName("service_tier"u8); +// writer.WriteStringValue(ServiceTier.Value.ToString()); +// } +// if (Optional.IsDefined(PreviousResponseId) && !Patch.Contains("$.previous_response_id"u8)) +// { +// 
writer.WritePropertyName("previous_response_id"u8); +// writer.WriteStringValue(PreviousResponseId); +// } +// if (Optional.IsDefined(Model) && !Patch.Contains("$.model"u8)) +// { +// writer.WritePropertyName("model"u8); +// writer.WriteStringValue(Model.Value.ToString()); +// } +// if (Optional.IsDefined(ReasoningOptions) && !Patch.Contains("$.reasoning"u8)) +// { +// writer.WritePropertyName("reasoning"u8); +// writer.WriteObjectValue(ReasoningOptions, options); +// } +// if (Optional.IsDefined(IsBackgroundModeEnabled) && !Patch.Contains("$.background"u8)) +// { +// writer.WritePropertyName("background"u8); +// writer.WriteBooleanValue(IsBackgroundModeEnabled.Value); +// } +// if (Optional.IsDefined(MaxOutputTokenCount) && !Patch.Contains("$.max_output_tokens"u8)) +// { +// writer.WritePropertyName("max_output_tokens"u8); +// writer.WriteNumberValue(MaxOutputTokenCount.Value); +// } +// if (Optional.IsDefined(Instructions) && !Patch.Contains("$.instructions"u8)) +// { +// writer.WritePropertyName("instructions"u8); +// writer.WriteStringValue(Instructions); +// } +// if (Optional.IsDefined(TextOptions) && !Patch.Contains("$.text"u8)) +// { +// writer.WritePropertyName("text"u8); +// writer.WriteObjectValue(TextOptions, options); +// } +// if (Patch.Contains("$.tools"u8)) +// { +// if (!Patch.IsRemoved("$.tools"u8)) +// { +// writer.WritePropertyName("tools"u8); +// writer.WriteRawValue(Patch.GetJson("$.tools"u8)); +// } +// } +// else if (Optional.IsCollectionDefined(Tools)) +// { +// writer.WritePropertyName("tools"u8); +// writer.WriteStartArray(); +// for (int i = 0; i < Tools.Count; i++) +// { +// if (Tools[i].Patch.IsRemoved("$"u8)) +// { +// continue; +// } +// writer.WriteObjectValue(Tools[i], options); +// } +// Patch.WriteTo(writer, "$.tools"u8); +// writer.WriteEndArray(); +// } +// if (Optional.IsDefined(ToolChoice) && !Patch.Contains("$.tool_choice"u8)) +// { +// writer.WritePropertyName("tool_choice"u8); +// writer.WriteObjectValue(ToolChoice, 
options); +// } +// if (Optional.IsDefined(TruncationMode) && !Patch.Contains("$.truncation"u8)) +// { +// writer.WritePropertyName("truncation"u8); +// writer.WriteStringValue(TruncationMode.Value.ToString()); +// } +// if (Patch.Contains("$.input"u8)) +// { +// if (!Patch.IsRemoved("$.input"u8)) +// { +// writer.WritePropertyName("input"u8); +// writer.WriteRawValue(Patch.GetJson("$.input"u8)); +// } +// } +// else +// { +// writer.WritePropertyName("input"u8); +// writer.WriteStartArray(); +// for (int i = 0; i < Input.Count; i++) +// { +// if (Input[i].Patch.IsRemoved("$"u8)) +// { +// continue; +// } +// writer.WriteObjectValue(Input[i], options); +// } +// Patch.WriteTo(writer, "$.input"u8); +// writer.WriteEndArray(); +// } +// if (Patch.Contains("$.include"u8)) +// { +// if (!Patch.IsRemoved("$.include"u8)) +// { +// writer.WritePropertyName("include"u8); +// writer.WriteRawValue(Patch.GetJson("$.include"u8)); +// } +// } +// else if (Optional.IsCollectionDefined(Include)) +// { +// writer.WritePropertyName("include"u8); +// writer.WriteStartArray(); +// for (int i = 0; i < Include.Count; i++) +// { +// if (Patch.IsRemoved(Encoding.UTF8.GetBytes($"$.include[{i}]"))) +// { +// continue; +// } +// writer.WriteStringValue(Include[i].ToSerialString()); +// } +// Patch.WriteTo(writer, "$.include"u8); +// writer.WriteEndArray(); +// } +// if (Optional.IsDefined(IsParallelToolCallsEnabled) && !Patch.Contains("$.parallel_tool_calls"u8)) +// { +// writer.WritePropertyName("parallel_tool_calls"u8); +// writer.WriteBooleanValue(IsParallelToolCallsEnabled.Value); +// } +// if (Optional.IsDefined(IsStoredOutputEnabled) && !Patch.Contains("$.store"u8)) +// { +// writer.WritePropertyName("store"u8); +// writer.WriteBooleanValue(IsStoredOutputEnabled.Value); +// } +// if (Optional.IsDefined(IsStreamingEnabled) && !Patch.Contains("$.stream"u8)) +// { +// writer.WritePropertyName("stream"u8); +// writer.WriteBooleanValue(IsStreamingEnabled.Value); +// } - 
Patch.WriteTo(writer); -#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - } +// Patch.WriteTo(writer); +// #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// } - CreateResponseOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); +// CreateResponseOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); - protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - if (format != "J") - { - throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support reading '{format}' format."); - } - using JsonDocument document = JsonDocument.ParseValue(ref reader); - return DeserializeCreateResponseOptions(document.RootElement, null, options); - } +// protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) +// { +// string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; +// if (format != "J") +// { +// throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support reading '{format}' format."); +// } +// using JsonDocument document = JsonDocument.ParseValue(ref reader); +// return DeserializeCreateResponseOptions(document.RootElement, null, options); +// } - internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonElement element, BinaryData data, ModelReaderWriterOptions options) - { - if (element.ValueKind == JsonValueKind.Null) - { - return null; - } - IDictionary metadata = default; - float? 
temperature = default; - float? topP = default; - string user = default; - ResponseServiceTier? serviceTier = default; - string previousResponseId = default; - ModelIdsResponses? model = default; - ResponseReasoningOptions reasoning = default; - bool? background = default; - int? maxOutputTokens = default; - string instructions = default; - ResponseTextOptions text = default; - IList tools = default; - ResponseToolChoice toolChoice = default; - ResponseTruncationMode? truncation = default; - IList input = default; - IList include = default; - bool? parallelToolCalls = default; - bool? store = default; - bool? stream = default; -#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); -#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - foreach (var prop in element.EnumerateObject()) - { - if (prop.NameEquals("metadata"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - Dictionary dictionary = new Dictionary(); - foreach (var prop0 in prop.Value.EnumerateObject()) - { - if (prop0.Value.ValueKind == JsonValueKind.Null) - { - dictionary.Add(prop0.Name, null); - } - else - { - dictionary.Add(prop0.Name, prop0.Value.GetString()); - } - } - metadata = dictionary; - continue; - } - if (prop.NameEquals("temperature"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - temperature = null; - continue; - } - temperature = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("top_p"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - topP = null; - continue; - } - topP = prop.Value.GetSingle(); - continue; - } - if (prop.NameEquals("user"u8)) - { - user = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("service_tier"u8)) - { - if (prop.Value.ValueKind == 
JsonValueKind.Null) - { - continue; - } - serviceTier = new ResponseServiceTier(prop.Value.GetString()); - continue; - } - if (prop.NameEquals("previous_response_id"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - previousResponseId = null; - continue; - } - previousResponseId = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("model"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - model = new ModelIdsResponses(prop.Value.GetString()); - continue; - } - if (prop.NameEquals("reasoning"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - reasoning = null; - continue; - } - reasoning = ResponseReasoningOptions.DeserializeResponseReasoningOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); - continue; - } - if (prop.NameEquals("background"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - background = null; - continue; - } - background = prop.Value.GetBoolean(); - continue; - } - if (prop.NameEquals("max_output_tokens"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - maxOutputTokens = null; - continue; - } - maxOutputTokens = prop.Value.GetInt32(); - continue; - } - if (prop.NameEquals("instructions"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - instructions = null; - continue; - } - instructions = prop.Value.GetString(); - continue; - } - if (prop.NameEquals("text"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - text = ResponseTextOptions.DeserializeResponseTextOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); - continue; - } - if (prop.NameEquals("tools"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in prop.Value.EnumerateArray()) - { - array.Add(ResponseTool.DeserializeResponseTool(item, item.GetUtf8Bytes(), options)); - } - tools = array; - continue; - } - if (prop.NameEquals("tool_choice"u8)) - { - if (prop.Value.ValueKind == 
JsonValueKind.Null) - { - continue; - } - toolChoice = ResponseToolChoice.DeserializeResponseToolChoice(prop.Value, options); - continue; - } - if (prop.NameEquals("truncation"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - truncation = null; - continue; - } - truncation = new ResponseTruncationMode(prop.Value.GetString()); - continue; - } - if (prop.NameEquals("input"u8)) - { - List array = new List(); - foreach (var item in prop.Value.EnumerateArray()) - { - array.Add(ResponseItem.DeserializeResponseItem(item, item.GetUtf8Bytes(), options)); - } - input = array; - continue; - } - if (prop.NameEquals("include"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - continue; - } - List array = new List(); - foreach (var item in prop.Value.EnumerateArray()) - { - array.Add(item.GetString().ToIncludable()); - } - include = array; - continue; - } - if (prop.NameEquals("parallel_tool_calls"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - parallelToolCalls = null; - continue; - } - parallelToolCalls = prop.Value.GetBoolean(); - continue; - } - if (prop.NameEquals("store"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - store = null; - continue; - } - store = prop.Value.GetBoolean(); - continue; - } - if (prop.NameEquals("stream"u8)) - { - if (prop.Value.ValueKind == JsonValueKind.Null) - { - stream = null; - continue; - } - stream = prop.Value.GetBoolean(); - continue; - } - patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); - } - return new CreateResponseOptions( - metadata ?? new ChangeTrackingDictionary(), - temperature, - topP, - user, - serviceTier, - previousResponseId, - model, - reasoning, - background, - maxOutputTokens, - instructions, - text, - tools ?? new ChangeTrackingList(), - toolChoice, - truncation, - input, - include ?? 
new ChangeTrackingList(), - parallelToolCalls, - store, - stream, - patch); - } +// internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonElement element, BinaryData data, ModelReaderWriterOptions options) +// { +// if (element.ValueKind == JsonValueKind.Null) +// { +// return null; +// } +// IDictionary metadata = default; +// float? temperature = default; +// float? topP = default; +// string user = default; +// ResponseServiceTier? serviceTier = default; +// string previousResponseId = default; +// ModelIdsResponses? model = default; +// ResponseReasoningOptions reasoning = default; +// bool? background = default; +// int? maxOutputTokens = default; +// string instructions = default; +// ResponseTextOptions text = default; +// IList tools = default; +// ResponseToolChoice toolChoice = default; +// ResponseTruncationMode? truncation = default; +// IList input = default; +// IList include = default; +// bool? parallelToolCalls = default; +// bool? store = default; +// bool? stream = default; +// #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); +// #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
+// foreach (var prop in element.EnumerateObject()) +// { +// if (prop.NameEquals("metadata"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// continue; +// } +// Dictionary dictionary = new Dictionary(); +// foreach (var prop0 in prop.Value.EnumerateObject()) +// { +// if (prop0.Value.ValueKind == JsonValueKind.Null) +// { +// dictionary.Add(prop0.Name, null); +// } +// else +// { +// dictionary.Add(prop0.Name, prop0.Value.GetString()); +// } +// } +// metadata = dictionary; +// continue; +// } +// if (prop.NameEquals("temperature"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// temperature = null; +// continue; +// } +// temperature = prop.Value.GetSingle(); +// continue; +// } +// if (prop.NameEquals("top_p"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// topP = null; +// continue; +// } +// topP = prop.Value.GetSingle(); +// continue; +// } +// if (prop.NameEquals("user"u8)) +// { +// user = prop.Value.GetString(); +// continue; +// } +// if (prop.NameEquals("service_tier"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// continue; +// } +// serviceTier = new ResponseServiceTier(prop.Value.GetString()); +// continue; +// } +// if (prop.NameEquals("previous_response_id"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// previousResponseId = null; +// continue; +// } +// previousResponseId = prop.Value.GetString(); +// continue; +// } +// if (prop.NameEquals("model"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// continue; +// } +// model = new ModelIdsResponses(prop.Value.GetString()); +// continue; +// } +// if (prop.NameEquals("reasoning"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// reasoning = null; +// continue; +// } +// reasoning = ResponseReasoningOptions.DeserializeResponseReasoningOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); +// continue; +// } +// if 
(prop.NameEquals("background"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// background = null; +// continue; +// } +// background = prop.Value.GetBoolean(); +// continue; +// } +// if (prop.NameEquals("max_output_tokens"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// maxOutputTokens = null; +// continue; +// } +// maxOutputTokens = prop.Value.GetInt32(); +// continue; +// } +// if (prop.NameEquals("instructions"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// instructions = null; +// continue; +// } +// instructions = prop.Value.GetString(); +// continue; +// } +// if (prop.NameEquals("text"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// continue; +// } +// text = ResponseTextOptions.DeserializeResponseTextOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); +// continue; +// } +// if (prop.NameEquals("tools"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// continue; +// } +// List array = new List(); +// foreach (var item in prop.Value.EnumerateArray()) +// { +// array.Add(ResponseTool.DeserializeResponseTool(item, item.GetUtf8Bytes(), options)); +// } +// tools = array; +// continue; +// } +// if (prop.NameEquals("tool_choice"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// continue; +// } +// toolChoice = ResponseToolChoice.DeserializeResponseToolChoice(prop.Value, options); +// continue; +// } +// if (prop.NameEquals("truncation"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// truncation = null; +// continue; +// } +// truncation = new ResponseTruncationMode(prop.Value.GetString()); +// continue; +// } +// if (prop.NameEquals("input"u8)) +// { +// List array = new List(); +// foreach (var item in prop.Value.EnumerateArray()) +// { +// array.Add(ResponseItem.DeserializeResponseItem(item, item.GetUtf8Bytes(), options)); +// } +// input = array; +// continue; +// } +// if 
(prop.NameEquals("include"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// continue; +// } +// List array = new List(); +// foreach (var item in prop.Value.EnumerateArray()) +// { +// array.Add(item.GetString().ToIncludable()); +// } +// include = array; +// continue; +// } +// if (prop.NameEquals("parallel_tool_calls"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// parallelToolCalls = null; +// continue; +// } +// parallelToolCalls = prop.Value.GetBoolean(); +// continue; +// } +// if (prop.NameEquals("store"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// store = null; +// continue; +// } +// store = prop.Value.GetBoolean(); +// continue; +// } +// if (prop.NameEquals("stream"u8)) +// { +// if (prop.Value.ValueKind == JsonValueKind.Null) +// { +// stream = null; +// continue; +// } +// stream = prop.Value.GetBoolean(); +// continue; +// } +// patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); +// } +// return new CreateResponseOptions( +// metadata ?? new ChangeTrackingDictionary(), +// temperature, +// topP, +// user, +// serviceTier, +// previousResponseId, +// model, +// reasoning, +// background, +// maxOutputTokens, +// instructions, +// text, +// tools ?? new ChangeTrackingList(), +// toolChoice, +// truncation, +// input, +// include ?? new ChangeTrackingList(), +// parallelToolCalls, +// store, +// stream, +// patch); +// } - BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); +// BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); - protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - return ModelReaderWriter.Write(this, options, OpenAIContext.Default); - default: - throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support writing '{options.Format}' format."); - } - } +// protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) +// { +// string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; +// switch (format) +// { +// case "J": +// return ModelReaderWriter.Write(this, options, OpenAIContext.Default); +// default: +// throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support writing '{options.Format}' format."); +// } +// } - CreateResponseOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); +// CreateResponseOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); - protected virtual CreateResponseOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) - { - string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; - switch (format) - { - case "J": - using (JsonDocument document = JsonDocument.Parse(data)) - { - return DeserializeCreateResponseOptions(document.RootElement, data, options); - } - default: - throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support reading '{options.Format}' format."); - } - } +// protected virtual CreateResponseOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) +// { +// string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; +// switch (format) +// { +// case "J": +// using (JsonDocument document = JsonDocument.Parse(data)) +// { +// return DeserializeCreateResponseOptions(document.RootElement, data, options); +// } +// default: +// throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support reading '{options.Format}' format."); +// } +// } - string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; +// string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; -#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValue value) - { - ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); - value = default; +// #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValue value) +// { +// ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); +// value = default; - if (local.StartsWith("reasoning"u8)) - { - return ReasoningOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); - } - if (local.StartsWith("text"u8)) - { - return TextOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("text"u8.Length)], out value); - } - if (local.StartsWith("tools"u8)) - { - int propertyLength = "tools"u8.Length; - ReadOnlySpan currentSlice = local.Slice(propertyLength); - if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) - { - return false; - } - return Tools[index].Patch.TryGetEncodedValue([.. "$"u8, .. 
currentSlice.Slice(bytesConsumed)], out value); - } - return false; - } -#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// if (local.StartsWith("reasoning"u8)) +// { +// return ReasoningOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); +// } +// if (local.StartsWith("text"u8)) +// { +// return TextOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("text"u8.Length)], out value); +// } +// if (local.StartsWith("tools"u8)) +// { +// int propertyLength = "tools"u8.Length; +// ReadOnlySpan currentSlice = local.Slice(propertyLength); +// if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) +// { +// return false; +// } +// return Tools[index].Patch.TryGetEncodedValue([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], out value); +// } +// return false; +// } +// #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. -#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue value) - { - ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); +// #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue value) +// { +// ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); - if (local.StartsWith("reasoning"u8)) - { - ReasoningOptions.Patch.Set([.. "$"u8, .. local.Slice("reasoning"u8.Length)], value); - return true; - } - if (local.StartsWith("text"u8)) - { - TextOptions.Patch.Set([.. "$"u8, .. 
local.Slice("text"u8.Length)], value); - return true; - } - if (local.StartsWith("tools"u8)) - { - int propertyLength = "tools"u8.Length; - ReadOnlySpan currentSlice = local.Slice(propertyLength); - if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) - { - return false; - } - Tools[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); - return true; - } - return false; - } -#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. +// if (local.StartsWith("reasoning"u8)) +// { +// ReasoningOptions.Patch.Set([.. "$"u8, .. local.Slice("reasoning"u8.Length)], value); +// return true; +// } +// if (local.StartsWith("text"u8)) +// { +// TextOptions.Patch.Set([.. "$"u8, .. local.Slice("text"u8.Length)], value); +// return true; +// } +// if (local.StartsWith("tools"u8)) +// { +// int propertyLength = "tools"u8.Length; +// ReadOnlySpan currentSlice = local.Slice(propertyLength); +// if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) +// { +// return false; +// } +// Tools[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); +// return true; +// } +// return false; +// } +// #pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
- public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions) - { - if (createResponseOptions == null) - { - return null; - } - return BinaryContent.Create(createResponseOptions, ModelSerializationExtensions.WireOptions); - } - } -} +// public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions) +// { +// if (createResponseOptions == null) +// { +// return null; +// } +// return BinaryContent.Create(createResponseOptions, ModelSerializationExtensions.WireOptions); +// } +// } +// } diff --git a/src/Custom/Responses/CreateResponseOptions.cs b/src/Custom/Responses/CreateResponseOptions.cs index dcd001e50..abae287e0 100644 --- a/src/Custom/Responses/CreateResponseOptions.cs +++ b/src/Custom/Responses/CreateResponseOptions.cs @@ -1,166 +1,91 @@ +using System.ClientModel; using System.ClientModel.Primitives; using System.Collections.Generic; -using System.ComponentModel; -using System.Diagnostics.CodeAnalysis; using System.Linq; namespace OpenAI.Responses { - [Experimental("OPENAI001")] + [CodeGenType("DotNetCreateResponse")] public partial class CreateResponseOptions { - [Experimental("SCME0001")] - private JsonPatch _patch; - - public CreateResponseOptions(List input) - { - Argument.AssertNotNull(input, nameof(input)); - - Metadata = new ChangeTrackingDictionary(); - Tools = new ChangeTrackingList(); - Input = input; - Include = new ChangeTrackingList(); - } - -#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - internal CreateResponseOptions(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, ModelIdsResponses? model, ResponseReasoningOptions reasoning, bool? background, int? maxOutputTokens, string instructions, ResponseTextOptions text, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncation, IList input, IList include, bool? 
parallelToolCalls, bool? store, bool? stream, in JsonPatch patch) - { - // Plugin customization: ensure initialization of collections - Metadata = metadata ?? new ChangeTrackingDictionary(); - Temperature = temperature; - TopP = topP; - EndUserId = user; - ServiceTier = serviceTier; - PreviousResponseId = previousResponseId; - Model = model; - ReasoningOptions = reasoning; - IsBackgroundModeEnabled = background; - MaxOutputTokenCount = maxOutputTokens; - Instructions = instructions; - TextOptions = text; - Tools = tools ?? new ChangeTrackingList(); - ToolChoice = toolChoice; - TruncationMode = truncation; - Input = input; - Include = include ?? new ChangeTrackingList(); - IsParallelToolCallsEnabled = parallelToolCalls; - IsStoredOutputEnabled = store; - IsStreamingEnabled = stream; - _patch = patch; - _patch.SetPropagators(PropagateSet, PropagateGet); - } -#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - - [EditorBrowsable(EditorBrowsableState.Never)] - [Experimental("SCME0001")] - public ref JsonPatch Patch => ref _patch; - /// - /// Gets a dictionary of custom metadata for the response. This corresponds to the "metadata" property in the JSON representation. - /// - public IDictionary Metadata { get; } - - /// - /// Gets or sets the sampling temperature to use, between 0 and 2. This corresponds to the "temperature" property in the JSON representation. - /// - public float? Temperature { get; set; } - - /// - /// Gets or sets the nucleus sampling parameter, between 0 and 1. This corresponds to the "top_p" property in the JSON representation. + /// Gets or sets whether to run the response in background mode. This corresponds to the "background" property in the JSON representation. /// - public float? TopP { get; set; } + [CodeGenMember("Background")] + public bool? IsBackgroundModeEnabled { get; set; } /// - /// Gets or sets a unique identifier representing the end-user. 
This corresponds to the "user" property in the JSON representation. + /// Gets or sets how tool calls should be selected during response generation. This corresponds to the "tool_choice" property in the JSON representation. /// - public string EndUserId { get; set; } + [CodeGenMember("ToolChoice")] + public ResponseToolChoice ToolChoice { get; set; } /// - /// Gets or sets the service tier to be used for processing the request. This corresponds to the "service_tier" property in the JSON representation. + /// Gets or sets the input items to be processed for the response. This corresponds to the "input" property in the JSON representation. /// - public ResponseServiceTier? ServiceTier { get; set; } + [CodeGenMember("Input")] + public IList InputItems { get; internal set; } /// - /// Gets or sets the ID of the response to continue from, enabling streaming responses. This corresponds to the "previous_response_id" property in the JSON representation. + /// Gets or sets the list of fields to include in the response. This corresponds to the "include" property in the JSON representation. /// - public string PreviousResponseId { get; set; } + [CodeGenMember("Include")] + public IList IncludedProperties { get; } /// - /// Gets or sets the model to be used for generating the response. This corresponds to the "model" property in the JSON representation. + /// Gets or sets whether multiple tool calls can be made in parallel. This corresponds to the "parallel_tool_calls" property in the JSON representation. /// - public ModelIdsResponses? Model { get; set; } + [CodeGenMember("ParallelToolCalls")] + public bool? IsParallelToolCallsEnabled { get; set; } /// - /// Gets or sets the reasoning options for the response. This corresponds to the "reasoning" property in the JSON representation. + /// Gets or sets whether the response should be stored for later retrieval. This corresponds to the "store" property in the JSON representation. 
/// - public ResponseReasoningOptions ReasoningOptions { get; set; } + [CodeGenMember("Store")] + public bool? IsStoredOutputEnabled { get; set; } /// - /// Gets or sets whether to run the response in background mode. This corresponds to the "background" property in the JSON representation. + /// Gets or sets whether the response should be streamed. This corresponds to the "stream" property in the JSON representation. /// - public bool? IsBackgroundModeEnabled { get; set; } + [CodeGenMember("Stream")] + public bool? IsStreamingEnabled { get; set; } /// /// Gets or sets the maximum number of output tokens to generate. This corresponds to the "max_output_tokens" property in the JSON representation. /// + [CodeGenMember("MaxOutputTokens")] public int? MaxOutputTokenCount { get; set; } /// - /// Gets or sets the instructions to guide the response generation. This corresponds to the "instructions" property in the JSON representation. - /// - public string Instructions { get; set; } - - /// - /// Gets or sets the text format options for the response. This corresponds to the "text" property in the JSON representation. + /// Gets or sets the model to be used for generating the response. This corresponds to the "model" property in the JSON representation. /// - public ResponseTextOptions TextOptions { get; set; } + [CodeGenMember("Model")] + public string Model { get; set; } /// - /// Gets a list of tools available to the response. This corresponds to the "tools" property in the JSON representation. + /// Gets or sets the reasoning options for the response. This corresponds to the "reasoning" property in the JSON representation. /// - public IList Tools { get; } + [CodeGenMember("Reasoning")] + public ResponseReasoningOptions ReasoningOptions { get; set; } /// - /// Gets or sets how tool calls should be selected during response generation. This corresponds to the "tool_choice" property in the JSON representation. + /// Gets or sets the text format options for the response. 
This corresponds to the "text" property in the JSON representation. /// - public ResponseToolChoice ToolChoice { get; set; } + [CodeGenMember("Text")] + public ResponseTextOptions TextOptions { get; set; } - /// + /// /// Gets or sets the truncation mode for the response. This corresponds to the "truncation" property in the JSON representation. /// + [CodeGenMember("Truncation")] public ResponseTruncationMode? TruncationMode { get; set; } - /// - /// Gets or sets the input items to be processed for the response. This corresponds to the "input" property in the JSON representation. - /// - public IList Input { get; internal set; } - - /// - /// Gets or sets the list of fields to include in the response. This corresponds to the "include" property in the JSON representation. - /// - public IList Include { get; set; } - - /// - /// Gets or sets whether multiple tool calls can be made in parallel. This corresponds to the "parallel_tool_calls" property in the JSON representation. - /// - public bool? IsParallelToolCallsEnabled { get; set; } - - /// - /// Gets or sets whether the response should be stored for later retrieval. This corresponds to the "store" property in the JSON representation. - /// - public bool? IsStoredOutputEnabled { get; set; } - - /// - /// Gets or sets whether the response should be streamed. This corresponds to the "stream" property in the JSON representation. - /// - public bool? 
IsStreamingEnabled { get; set; } - - internal static CreateResponseOptions Create(IEnumerable inputItems, ResponsesClient client, ResponseCreationOptions options = null, bool isStreaming = false) + internal static CreateResponseOptions Create(IEnumerable inputItems, string model, ResponsesClient client, ResponseCreationOptions options = null, bool isStreaming = false) { Argument.AssertNotNull(inputItems, nameof(inputItems)); options ??= new(); - var responseCreationOptions = client.CreatePerCallOptions(options, inputItems, isStreaming); - + var responseCreationOptions = client.CreatePerCallOptions(options, inputItems, model, isStreaming); +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. return new CreateResponseOptions( responseCreationOptions.Metadata, responseCreationOptions.Temperature, @@ -183,6 +108,7 @@ [.. responseCreationOptions.Include.Select(x => x.ToIncludable())], responseCreationOptions.StoredOutputEnabled, responseCreationOptions.Stream, new JsonPatch()); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
} internal CreateResponseOptions GetClone() @@ -192,5 +118,14 @@ internal CreateResponseOptions GetClone() return copiedOptions; } + + public static implicit operator BinaryContent(CreateResponseOptions createResponseOptions) + { + if (createResponseOptions == null) + { + return null; + } + return BinaryContent.Create(createResponseOptions, ModelSerializationExtensions.WireOptions); + } } } diff --git a/src/Custom/Responses/ModelIdResponses.cs b/src/Custom/Responses/ModelIdResponses.cs index 6221c481e..572599868 100644 --- a/src/Custom/Responses/ModelIdResponses.cs +++ b/src/Custom/Responses/ModelIdResponses.cs @@ -1,7 +1,7 @@ namespace OpenAI.Responses; [CodeGenType("ModelIdsResponses")] -public readonly partial struct ModelIdsResponses +internal readonly partial struct ModelIdsResponses { } \ No newline at end of file diff --git a/src/Custom/Responses/ResponseItemList.Serialization.cs b/src/Custom/Responses/ResponseItemCollection.Serialization.cs similarity index 100% rename from src/Custom/Responses/ResponseItemList.Serialization.cs rename to src/Custom/Responses/ResponseItemCollection.Serialization.cs diff --git a/src/Custom/Responses/ResponseItemList.cs b/src/Custom/Responses/ResponseItemCollection.cs similarity index 97% rename from src/Custom/Responses/ResponseItemList.cs rename to src/Custom/Responses/ResponseItemCollection.cs index c0ba8dda7..60a1472ef 100644 --- a/src/Custom/Responses/ResponseItemList.cs +++ b/src/Custom/Responses/ResponseItemCollection.cs @@ -38,6 +38,7 @@ internal ResponseItemCollection(string @object, IList data, bool h [Experimental("SCME0001")] public ref JsonPatch Patch => ref _patch; + [EditorBrowsable(EditorBrowsableState.Never)] public string Object { get; } = "list"; public IList Data { get; } diff --git a/src/Custom/Responses/ResponseResult.cs b/src/Custom/Responses/ResponseResult.cs index 347a414bc..f58f0f3ed 100644 --- a/src/Custom/Responses/ResponseResult.cs +++ b/src/Custom/Responses/ResponseResult.cs @@ -100,7 +100,7 @@ 
internal ResponseResult(IDictionary metadata, float? temperature /// /// Gets the internal model identifier that was used for generating the response. /// - public ModelIdsResponses? InternalModel { get; } + internal ModelIdsResponses? InternalModel { get; } /// /// Gets the model name that was used for generating the response. This corresponds to the "model" property in the JSON representation. diff --git a/src/Custom/Responses/ResponsesClient.cs b/src/Custom/Responses/ResponsesClient.cs index 1c563cf94..3adb3ec1c 100644 --- a/src/Custom/Responses/ResponsesClient.cs +++ b/src/Custom/Responses/ResponsesClient.cs @@ -25,15 +25,10 @@ namespace OpenAI.Responses; [CodeGenSuppress("GetResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(CancellationToken))] public partial class ResponsesClient { - private readonly string _model; - // CUSTOM: Added as a convenience. /// Initializes a new instance of . - /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The API key to authenticate with the service. - /// or is null. - /// is an empty string, and was expected to be non-empty. - public ResponsesClient(string model, string apiKey) : this(model, new ApiKeyCredential(apiKey), new OpenAIClientOptions()) + public ResponsesClient(string apiKey) : this(new ApiKeyCredential(apiKey), new OpenAIClientOptions()) { } @@ -42,11 +37,8 @@ public partial class ResponsesClient // - Used a custom pipeline. // - Demoted the endpoint parameter to be a property in the options class. /// Initializes a new instance of . - /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The to authenticate with the service. - /// or is null. - /// is an empty string, and was expected to be non-empty. 
- public ResponsesClient(string model, ApiKeyCredential credential) : this(model, credential, new OpenAIClientOptions()) + public ResponsesClient(ApiKeyCredential credential) : this(credential, new OpenAIClientOptions()) { } @@ -55,39 +47,29 @@ public partial class ResponsesClient // - Used a custom pipeline. // - Demoted the endpoint parameter to be a property in the options class. /// Initializes a new instance of . - /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The to authenticate with the service. /// The options to configure the client. - /// or is null. - /// is an empty string, and was expected to be non-empty. - public ResponsesClient(string model, ApiKeyCredential credential, OpenAIClientOptions options) : this(model, OpenAIClient.CreateApiKeyAuthenticationPolicy(credential), options) + public ResponsesClient(ApiKeyCredential credential, OpenAIClientOptions options) : this(OpenAIClient.CreateApiKeyAuthenticationPolicy(credential), options) { } // CUSTOM: Added as a convenience. /// Initializes a new instance of . - /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The authentication policy used to authenticate with the service. - /// or is null. - /// is an empty string, and was expected to be non-empty. - public ResponsesClient(string model, AuthenticationPolicy authenticationPolicy) : this(model, authenticationPolicy, new OpenAIClientOptions()) + public ResponsesClient(AuthenticationPolicy authenticationPolicy) : this(authenticationPolicy, new OpenAIClientOptions()) { } // CUSTOM: Added as a convenience. /// Initializes a new instance of . - /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The authentication policy used to authenticate with the service. /// The options to configure the client. - /// or is null. 
- /// is an empty string, and was expected to be non-empty. - public ResponsesClient(string model, AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options) + /// is null. + public ResponsesClient(AuthenticationPolicy authenticationPolicy, OpenAIClientOptions options) { - Argument.AssertNotNullOrEmpty(model, nameof(model)); Argument.AssertNotNull(authenticationPolicy, nameof(authenticationPolicy)); options ??= new OpenAIClientOptions(); - _model = model; Pipeline = OpenAIClient.CreatePipeline(authenticationPolicy, options); _endpoint = OpenAIClient.GetEndpoint(options); } @@ -99,16 +81,13 @@ public ResponsesClient(string model, AuthenticationPolicy authenticationPolicy, // - Made protected. /// Initializes a new instance of . /// The HTTP pipeline to send and receive REST requests and responses. - /// The name of the model to use in requests sent to the service. To learn more about the available models, see . /// The options to configure the client. - /// or is null. - /// is an empty string, and was expected to be non-empty. - protected internal ResponsesClient(ClientPipeline pipeline, string model, OpenAIClientOptions options) + /// is null. + protected internal ResponsesClient(ClientPipeline pipeline, OpenAIClientOptions options) { Argument.AssertNotNull(pipeline, nameof(pipeline)); options ??= new OpenAIClientOptions(); - _model = model; Pipeline = pipeline; _endpoint = OpenAIClient.GetEndpoint(options); } @@ -119,18 +98,12 @@ protected internal ResponsesClient(ClientPipeline pipeline, string model, OpenAI [Experimental("OPENAI001")] public virtual Uri Endpoint => _endpoint; - /// - /// Gets the name of the model used in requests sent to the service. 
- /// - [Experimental("OPENAI001")] - public virtual string Model => _model; - - internal virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual Task> CreateResponseAsync(IEnumerable inputItems, string model, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { - return CreateResponseAsync(inputItems, options, cancellationToken.ToRequestOptions() ?? new RequestOptions()); + return CreateResponseAsync(inputItems, model, options, cancellationToken.ToRequestOptions() ?? new RequestOptions()); } - internal async Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options, RequestOptions requestOptions) + internal async Task> CreateResponseAsync(IEnumerable inputItems, string model, ResponseCreationOptions options, RequestOptions requestOptions) { Argument.AssertNotNullOrEmpty(inputItems, nameof(inputItems)); Argument.AssertNotNull(requestOptions, nameof(requestOptions)); @@ -139,39 +112,41 @@ internal async Task> CreateResponseAsync(IEnumerabl throw new InvalidOperationException("'requestOptions.BufferResponse' must be 'true' when calling 'CreateResponseAsync'."); } - using BinaryContent content = CreatePerCallOptions(options, inputItems, stream: false).ToBinaryContent(); + using BinaryContent content = CreatePerCallOptions(options, inputItems, model, stream: false).ToBinaryContent(); ClientResult protocolResult = await CreateResponseAsync(content, requestOptions).ConfigureAwait(false); OpenAIResponse convenienceValue = (OpenAIResponse)protocolResult; return ClientResult.FromValue(convenienceValue, protocolResult.GetRawResponse()); } - internal virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual ClientResult CreateResponse(IEnumerable inputItems, string model, ResponseCreationOptions options = 
null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(inputItems, nameof(inputItems)); - using BinaryContent content = CreatePerCallOptions(options, inputItems, stream: false).ToBinaryContent(); + using BinaryContent content = CreatePerCallOptions(options, inputItems, model, stream: false).ToBinaryContent(); ClientResult protocolResult = CreateResponse(content, cancellationToken.ToRequestOptions()); OpenAIResponse convenienceValue = (OpenAIResponse)protocolResult; return ClientResult.FromValue(convenienceValue, protocolResult.GetRawResponse()); } - internal virtual async Task> CreateResponseAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual async Task> CreateResponseAsync(string userInputText, string model, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); return await CreateResponseAsync( [ResponseItem.CreateUserMessageItem(userInputText)], + model, options, cancellationToken) .ConfigureAwait(false); } - internal virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual ClientResult CreateResponse(string userInputText, string model, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); return CreateResponse( [ResponseItem.CreateUserMessageItem(userInputText)], + model, options, cancellationToken); } @@ -192,12 +167,12 @@ public virtual async Task> CreateResponseAsync(Crea return ClientResult.FromValue((ResponseResult)result.GetRawResponse().Content, result.GetRawResponse()); } - internal virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken 
cancellationToken = default) + internal virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, string model, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { - return CreateResponseStreamingAsync(inputItems, options, cancellationToken.ToRequestOptions(streaming: true)); + return CreateResponseStreamingAsync(inputItems, model, options, cancellationToken.ToRequestOptions(streaming: true)); } - internal AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options, RequestOptions requestOptions) + internal AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, string model, ResponseCreationOptions options, RequestOptions requestOptions) { Argument.AssertNotNullOrEmpty(inputItems, nameof(inputItems)); Argument.AssertNotNull(requestOptions, nameof(requestOptions)); @@ -206,7 +181,7 @@ internal AsyncCollectionResult CreateResponseStreamingA throw new InvalidOperationException("'requestOptions.BufferResponse' must be 'false' when calling 'CreateResponseStreamingAsync'."); } - using BinaryContent content = CreatePerCallOptions(options, inputItems, stream: true).ToBinaryContent(); + using BinaryContent content = CreatePerCallOptions(options, inputItems, model, stream: true).ToBinaryContent(); return new AsyncSseUpdateCollection( async () => await CreateResponseAsync(content, requestOptions).ConfigureAwait(false), StreamingResponseUpdate.DeserializeStreamingResponseUpdate, @@ -233,11 +208,11 @@ internal AsyncCollectionResult CreateResponseStreamingA requestOptions.CancellationToken); } - internal virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, string model, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { 
Argument.AssertNotNullOrEmpty(inputItems, nameof(inputItems)); - using BinaryContent content = CreatePerCallOptions(options, inputItems, stream: true).ToBinaryContent(); + using BinaryContent content = CreatePerCallOptions(options, inputItems, model, stream: true).ToBinaryContent(); return new SseUpdateCollection( () => CreateResponse(content, cancellationToken.ToRequestOptions(streaming: true)), StreamingResponseUpdate.DeserializeStreamingResponseUpdate, @@ -254,22 +229,24 @@ public virtual CollectionResult CreateResponseStreaming cancellationToken); } - internal virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, string model, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); return CreateResponseStreamingAsync( [ResponseItem.CreateUserMessageItem(userInputText)], + model, options, cancellationToken); } - internal virtual CollectionResult CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) + internal virtual CollectionResult CreateResponseStreaming(string userInputText, string model, ResponseCreationOptions options = null, CancellationToken cancellationToken = default) { Argument.AssertNotNullOrEmpty(userInputText, nameof(userInputText)); return CreateResponseStreaming( [ResponseItem.CreateUserMessageItem(userInputText)], + model, options, cancellationToken); } @@ -431,20 +408,24 @@ public virtual async Task> GetResponseInput return ClientResult.FromValue((ResponseItemCollection)result, result.GetRawResponse()); } - internal virtual ResponseCreationOptions CreatePerCallOptions(ResponseCreationOptions userOptions, IEnumerable inputItems, bool stream = false) + internal 
virtual ResponseCreationOptions CreatePerCallOptions(ResponseCreationOptions userOptions, IEnumerable inputItems, string model, bool stream = false) { ResponseCreationOptions copiedOptions = userOptions is null ? new() : userOptions.GetClone(); copiedOptions.Input = inputItems.ToList(); - copiedOptions.Model = Model; if (stream) { copiedOptions.Stream = true; } + if (string.IsNullOrEmpty(copiedOptions.Model)) + { + copiedOptions.Model = model; + } + return copiedOptions; } @@ -454,11 +435,6 @@ internal virtual CreateResponseOptions CreatePerCallOptions(CreateResponseOption ? new() : userOptions.GetClone(); - if (copiedOptions.Model is null) - { - copiedOptions.Model = Model; - } - if (stream) { copiedOptions.IsStreamingEnabled = true; diff --git a/src/Generated/Models/OpenAIContext.cs b/src/Generated/Models/OpenAIContext.cs index 5d1b262c2..68f73da31 100644 --- a/src/Generated/Models/OpenAIContext.cs +++ b/src/Generated/Models/OpenAIContext.cs @@ -104,6 +104,7 @@ namespace OpenAI [ModelReaderWriterBuildable(typeof(CreateContainerBody))] [ModelReaderWriterBuildable(typeof(CreateContainerBodyExpiresAfter))] [ModelReaderWriterBuildable(typeof(CreateContainerFileBody))] + [ModelReaderWriterBuildable(typeof(CreateResponseOptions))] [ModelReaderWriterBuildable(typeof(CustomMcpToolCallApprovalPolicy))] [ModelReaderWriterBuildable(typeof(DeleteContainerFileResponse))] [ModelReaderWriterBuildable(typeof(DeleteContainerResponse))] diff --git a/src/Generated/Models/Responses/CreateResponseOptions.Serialization.cs b/src/Generated/Models/Responses/CreateResponseOptions.Serialization.cs new file mode 100644 index 000000000..e83bc531e --- /dev/null +++ b/src/Generated/Models/Responses/CreateResponseOptions.Serialization.cs @@ -0,0 +1,615 @@ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using OpenAI; + +namespace OpenAI.Responses +{ + public partial class 
CreateResponseOptions : IJsonModel + { + internal CreateResponseOptions() : this(null, default, default, null, default, null, null, null, default, default, null, null, null, null, default, null, null, default, default, default, default) + { + } + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + if (Patch.Contains("$"u8)) + { + writer.WriteRawValue(Patch.GetJson("$"u8)); + return; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + writer.WriteStartObject(); + JsonModelWriteCore(writer, options); + writer.WriteEndObject(); + } + + protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support writing '{format}' format."); + } +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + if (Optional.IsCollectionDefined(Metadata) && !Patch.Contains("$.metadata"u8)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartObject(); +#if NET8_0_OR_GREATER + global::System.Span buffer = stackalloc byte[256]; +#endif + foreach (var item in Metadata) + { +#if NET8_0_OR_GREATER + int bytesWritten = global::System.Text.Encoding.UTF8.GetBytes(item.Key.AsSpan(), buffer); + bool patchContains = (bytesWritten == 256) ? 
Patch.Contains("$.metadata"u8, global::System.Text.Encoding.UTF8.GetBytes(item.Key)) : Patch.Contains("$.metadata"u8, buffer.Slice(0, bytesWritten)); +#else + bool patchContains = Patch.Contains("$.metadata"u8, Encoding.UTF8.GetBytes(item.Key)); +#endif + if (!patchContains) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStringValue(item.Value); + } + } + + Patch.WriteTo(writer, "$.metadata"u8); + writer.WriteEndObject(); + } + if (Optional.IsDefined(Temperature) && !Patch.Contains("$.temperature"u8)) + { + writer.WritePropertyName("temperature"u8); + writer.WriteNumberValue(Temperature.Value); + } + if (Optional.IsDefined(TopP) && !Patch.Contains("$.top_p"u8)) + { + writer.WritePropertyName("top_p"u8); + writer.WriteNumberValue(TopP.Value); + } + if (Optional.IsDefined(User) && !Patch.Contains("$.user"u8)) + { + writer.WritePropertyName("user"u8); + writer.WriteStringValue(User); + } + if (Optional.IsDefined(ServiceTier) && !Patch.Contains("$.service_tier"u8)) + { + writer.WritePropertyName("service_tier"u8); + writer.WriteStringValue(ServiceTier.Value.ToString()); + } + if (Optional.IsDefined(PreviousResponseId) && !Patch.Contains("$.previous_response_id"u8)) + { + writer.WritePropertyName("previous_response_id"u8); + writer.WriteStringValue(PreviousResponseId); + } + if (Optional.IsDefined(Model) && !Patch.Contains("$.model"u8)) + { + writer.WritePropertyName("model"u8); + writer.WriteStringValue(Model); + } + if (Optional.IsDefined(ReasoningOptions) && !Patch.Contains("$.reasoning"u8)) + { + writer.WritePropertyName("reasoning"u8); + writer.WriteObjectValue(ReasoningOptions, options); + } + if (Optional.IsDefined(IsBackgroundModeEnabled) && !Patch.Contains("$.background"u8)) + { + writer.WritePropertyName("background"u8); + writer.WriteBooleanValue(IsBackgroundModeEnabled.Value); + } + if (Optional.IsDefined(MaxOutputTokenCount) && !Patch.Contains("$.max_output_tokens"u8)) + { + 
writer.WritePropertyName("max_output_tokens"u8); + writer.WriteNumberValue(MaxOutputTokenCount.Value); + } + if (Optional.IsDefined(Instructions) && !Patch.Contains("$.instructions"u8)) + { + writer.WritePropertyName("instructions"u8); + writer.WriteStringValue(Instructions); + } + if (Optional.IsDefined(TextOptions) && !Patch.Contains("$.text"u8)) + { + writer.WritePropertyName("text"u8); + writer.WriteObjectValue(TextOptions, options); + } + if (Patch.Contains("$.tools"u8)) + { + if (!Patch.IsRemoved("$.tools"u8)) + { + writer.WritePropertyName("tools"u8); + writer.WriteRawValue(Patch.GetJson("$.tools"u8)); + } + } + else if (Optional.IsCollectionDefined(Tools)) + { + writer.WritePropertyName("tools"u8); + writer.WriteStartArray(); + for (int i = 0; i < Tools.Count; i++) + { + if (Tools[i].Patch.IsRemoved("$"u8)) + { + continue; + } + writer.WriteObjectValue(Tools[i], options); + } + Patch.WriteTo(writer, "$.tools"u8); + writer.WriteEndArray(); + } + if (Optional.IsDefined(ToolChoice) && !Patch.Contains("$.tool_choice"u8)) + { + writer.WritePropertyName("tool_choice"u8); + writer.WriteObjectValue(ToolChoice, options); + } + if (Optional.IsDefined(TruncationMode) && !Patch.Contains("$.truncation"u8)) + { + writer.WritePropertyName("truncation"u8); + writer.WriteStringValue(TruncationMode.Value.ToString()); + } + if (Patch.Contains("$.input"u8)) + { + if (!Patch.IsRemoved("$.input"u8)) + { + writer.WritePropertyName("input"u8); + writer.WriteRawValue(Patch.GetJson("$.input"u8)); + } + } + else + { + writer.WritePropertyName("input"u8); + writer.WriteStartArray(); + for (int i = 0; i < InputItems.Count; i++) + { + if (InputItems[i].Patch.IsRemoved("$"u8)) + { + continue; + } + writer.WriteObjectValue(InputItems[i], options); + } + Patch.WriteTo(writer, "$.input"u8); + writer.WriteEndArray(); + } + if (Patch.Contains("$.include"u8)) + { + if (!Patch.IsRemoved("$.include"u8)) + { + writer.WritePropertyName("include"u8); + 
writer.WriteRawValue(Patch.GetJson("$.include"u8)); + } + } + else if (Optional.IsCollectionDefined(IncludedProperties)) + { + writer.WritePropertyName("include"u8); + writer.WriteStartArray(); + for (int i = 0; i < IncludedProperties.Count; i++) + { + if (Patch.IsRemoved(Encoding.UTF8.GetBytes($"$.include[{i}]"))) + { + continue; + } + writer.WriteStringValue(IncludedProperties[i].ToSerialString()); + } + Patch.WriteTo(writer, "$.include"u8); + writer.WriteEndArray(); + } + if (Optional.IsDefined(IsParallelToolCallsEnabled) && !Patch.Contains("$.parallel_tool_calls"u8)) + { + writer.WritePropertyName("parallel_tool_calls"u8); + writer.WriteBooleanValue(IsParallelToolCallsEnabled.Value); + } + if (Optional.IsDefined(IsStoredOutputEnabled) && !Patch.Contains("$.store"u8)) + { + writer.WritePropertyName("store"u8); + writer.WriteBooleanValue(IsStoredOutputEnabled.Value); + } + if (Optional.IsDefined(IsStreamingEnabled) && !Patch.Contains("$.stream"u8)) + { + writer.WritePropertyName("stream"u8); + writer.WriteBooleanValue(IsStreamingEnabled.Value); + } + + Patch.WriteTo(writer); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + } + + CreateResponseOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => JsonModelCreateCore(ref reader, options); + + protected virtual CreateResponseOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support reading '{format}' format."); + } + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateResponseOptions(document.RootElement, null, options); + } + + internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonElement element, BinaryData data, ModelReaderWriterOptions options) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IDictionary metadata = default; + float? temperature = default; + float? topP = default; + string user = default; + ResponseServiceTier? serviceTier = default; + string previousResponseId = default; + string model = default; + ResponseReasoningOptions reasoningOptions = default; + bool? isBackgroundModeEnabled = default; + int? maxOutputTokenCount = default; + string instructions = default; + ResponseTextOptions textOptions = default; + IList tools = default; + ResponseToolChoice toolChoice = default; + ResponseTruncationMode? truncationMode = default; + IList inputItems = default; + IList includedProperties = default; + bool? isParallelToolCallsEnabled = default; + bool? isStoredOutputEnabled = default; + bool? isStreamingEnabled = default; +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + JsonPatch patch = new JsonPatch(data is null ? ReadOnlyMemory.Empty : data.ToMemory()); +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
+ foreach (var prop in element.EnumerateObject()) + { + if (prop.NameEquals("metadata"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var prop0 in prop.Value.EnumerateObject()) + { + if (prop0.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(prop0.Name, null); + } + else + { + dictionary.Add(prop0.Name, prop0.Value.GetString()); + } + } + metadata = dictionary; + continue; + } + if (prop.NameEquals("temperature"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + temperature = null; + continue; + } + temperature = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("top_p"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + topP = null; + continue; + } + topP = prop.Value.GetSingle(); + continue; + } + if (prop.NameEquals("user"u8)) + { + user = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("service_tier"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + serviceTier = new ResponseServiceTier(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("previous_response_id"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + previousResponseId = null; + continue; + } + previousResponseId = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("model"u8)) + { + model = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("reasoning"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + reasoningOptions = null; + continue; + } + reasoningOptions = ResponseReasoningOptions.DeserializeResponseReasoningOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("background"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + isBackgroundModeEnabled = null; + continue; + } + isBackgroundModeEnabled = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("max_output_tokens"u8)) + { + if 
(prop.Value.ValueKind == JsonValueKind.Null) + { + maxOutputTokenCount = null; + continue; + } + maxOutputTokenCount = prop.Value.GetInt32(); + continue; + } + if (prop.NameEquals("instructions"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + instructions = null; + continue; + } + instructions = prop.Value.GetString(); + continue; + } + if (prop.NameEquals("text"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + textOptions = ResponseTextOptions.DeserializeResponseTextOptions(prop.Value, prop.Value.GetUtf8Bytes(), options); + continue; + } + if (prop.NameEquals("tools"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ResponseTool.DeserializeResponseTool(item, item.GetUtf8Bytes(), options)); + } + tools = array; + continue; + } + if (prop.NameEquals("tool_choice"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + toolChoice = ResponseToolChoice.DeserializeResponseToolChoice(prop.Value, options); + continue; + } + if (prop.NameEquals("truncation"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + truncationMode = null; + continue; + } + truncationMode = new ResponseTruncationMode(prop.Value.GetString()); + continue; + } + if (prop.NameEquals("input"u8)) + { + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(ResponseItem.DeserializeResponseItem(item, item.GetUtf8Bytes(), options)); + } + inputItems = array; + continue; + } + if (prop.NameEquals("include"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in prop.Value.EnumerateArray()) + { + array.Add(item.GetString().ToIncludable()); + } + includedProperties = array; + continue; + } + if (prop.NameEquals("parallel_tool_calls"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + 
isParallelToolCallsEnabled = null; + continue; + } + isParallelToolCallsEnabled = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("store"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + isStoredOutputEnabled = null; + continue; + } + isStoredOutputEnabled = prop.Value.GetBoolean(); + continue; + } + if (prop.NameEquals("stream"u8)) + { + if (prop.Value.ValueKind == JsonValueKind.Null) + { + isStreamingEnabled = null; + continue; + } + isStreamingEnabled = prop.Value.GetBoolean(); + continue; + } + patch.Set([.. "$."u8, .. Encoding.UTF8.GetBytes(prop.Name)], prop.Value.GetUtf8Bytes()); + } + return new CreateResponseOptions( + metadata ?? new ChangeTrackingDictionary(), + temperature, + topP, + user, + serviceTier, + previousResponseId, + model, + reasoningOptions, + isBackgroundModeEnabled, + maxOutputTokenCount, + instructions, + textOptions, + tools ?? new ChangeTrackingList(), + toolChoice, + truncationMode, + inputItems, + includedProperties ?? new ChangeTrackingList(), + isParallelToolCallsEnabled, + isStoredOutputEnabled, + isStreamingEnabled, + patch); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options); + + protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options, OpenAIContext.Default); + default: + throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support writing '{options.Format}' format."); + } + } + + CreateResponseOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) => PersistableModelCreateCore(data, options); + + protected virtual CreateResponseOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options) + { + string format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + switch (format) + { + case "J": + using (JsonDocument document = JsonDocument.Parse(data, ModelSerializationExtensions.JsonDocumentOptions)) + { + return DeserializeCreateResponseOptions(document.RootElement, data, options); + } + default: + throw new FormatException($"The model {nameof(CreateResponseOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + private bool PropagateGet(ReadOnlySpan jsonPath, out JsonPatch.EncodedValue value) + { + ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); + value = default; + + if (local.StartsWith("reasoning"u8)) + { + return ReasoningOptions.Patch.TryGetEncodedValue([.. "$"u8, .. local.Slice("reasoning"u8.Length)], out value); + } + if (local.StartsWith("text"u8)) + { + return TextOptions.Patch.TryGetEncodedValue([.. "$"u8, .. 
local.Slice("text"u8.Length)], out value); + } + if (local.StartsWith("tools"u8)) + { + int propertyLength = "tools"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + return Tools[index].Patch.TryGetEncodedValue([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], out value); + } + if (local.StartsWith("input"u8)) + { + int propertyLength = "input"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + return InputItems[index].Patch.TryGetEncodedValue([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], out value); + } + return false; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + private bool PropagateSet(ReadOnlySpan jsonPath, JsonPatch.EncodedValue value) + { + ReadOnlySpan local = jsonPath.SliceToStartOfPropertyName(); + + if (local.StartsWith("reasoning"u8)) + { + ReasoningOptions.Patch.Set([.. "$"u8, .. local.Slice("reasoning"u8.Length)], value); + return true; + } + if (local.StartsWith("text"u8)) + { + TextOptions.Patch.Set([.. "$"u8, .. local.Slice("text"u8.Length)], value); + return true; + } + if (local.StartsWith("tools"u8)) + { + int propertyLength = "tools"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + Tools[index].Patch.Set([.. "$"u8, .. 
currentSlice.Slice(bytesConsumed)], value); + return true; + } + if (local.StartsWith("input"u8)) + { + int propertyLength = "input"u8.Length; + ReadOnlySpan currentSlice = local.Slice(propertyLength); + if (!currentSlice.TryGetIndex(out int index, out int bytesConsumed)) + { + return false; + } + InputItems[index].Patch.Set([.. "$"u8, .. currentSlice.Slice(bytesConsumed)], value); + return true; + } + return false; + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + } +} diff --git a/src/Generated/Models/Responses/CreateResponseOptions.cs b/src/Generated/Models/Responses/CreateResponseOptions.cs new file mode 100644 index 000000000..60b416499 --- /dev/null +++ b/src/Generated/Models/Responses/CreateResponseOptions.cs @@ -0,0 +1,82 @@ +// + +#nullable disable + +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Text.Json.Serialization; +using OpenAI; + +namespace OpenAI.Responses +{ + [Experimental("OPENAI001")] + public partial class CreateResponseOptions + { + [Experimental("SCME0001")] + private JsonPatch _patch; + + public CreateResponseOptions(IEnumerable inputItems, string model) + { + Argument.AssertNotNull(inputItems, nameof(inputItems)); + + Metadata = new ChangeTrackingDictionary(); + Tools = new ChangeTrackingList(); + InputItems = inputItems.ToList(); + IncludedProperties = new ChangeTrackingList(); + Model = model; + } + +#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + internal CreateResponseOptions(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, string model, ResponseReasoningOptions reasoningOptions, bool? isBackgroundModeEnabled, int? 
maxOutputTokenCount, string instructions, ResponseTextOptions textOptions, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncationMode, IList inputItems, IList includedProperties, bool? isParallelToolCallsEnabled, bool? isStoredOutputEnabled, bool? isStreamingEnabled, in JsonPatch patch) + { + // Plugin customization: ensure initialization of collections + Metadata = metadata ?? new ChangeTrackingDictionary(); + Temperature = temperature; + TopP = topP; + User = user; + ServiceTier = serviceTier; + PreviousResponseId = previousResponseId; + Model = model; + ReasoningOptions = reasoningOptions; + IsBackgroundModeEnabled = isBackgroundModeEnabled; + MaxOutputTokenCount = maxOutputTokenCount; + Instructions = instructions; + TextOptions = textOptions; + Tools = tools ?? new ChangeTrackingList(); + ToolChoice = toolChoice; + TruncationMode = truncationMode; + InputItems = inputItems ?? new ChangeTrackingList(); + IncludedProperties = includedProperties ?? new ChangeTrackingList(); + IsParallelToolCallsEnabled = isParallelToolCallsEnabled; + IsStoredOutputEnabled = isStoredOutputEnabled; + IsStreamingEnabled = isStreamingEnabled; + _patch = patch; + _patch.SetPropagators(PropagateSet, PropagateGet); + } +#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. + + [JsonIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + [Experimental("SCME0001")] + public ref JsonPatch Patch => ref _patch; + + public IDictionary Metadata { get; } + + public float? Temperature { get; set; } + + public float? TopP { get; set; } + + public string User { get; set; } + + public ResponseServiceTier? 
ServiceTier { get; set; } + + public string PreviousResponseId { get; set; } + + public string Instructions { get; set; } + + public IList Tools { get; } + } +} diff --git a/src/Generated/Models/Responses/ModelIdsResponses.cs b/src/Generated/Models/Responses/ModelIdsResponses.cs index 9e8675afc..722988939 100644 --- a/src/Generated/Models/Responses/ModelIdsResponses.cs +++ b/src/Generated/Models/Responses/ModelIdsResponses.cs @@ -4,13 +4,11 @@ using System; using System.ComponentModel; -using System.Diagnostics.CodeAnalysis; using OpenAI; namespace OpenAI.Responses { - [Experimental("OPENAI001")] - public readonly partial struct ModelIdsResponses : IEquatable + internal readonly partial struct ModelIdsResponses : IEquatable { private readonly string _value; private const string Gpt41Value = "gpt-4.1"; @@ -82,127 +80,127 @@ public ModelIdsResponses(string value) _value = value; } - public static ModelIdsResponses Gpt41 { get; } = new ModelIdsResponses(Gpt41Value); + internal static ModelIdsResponses Gpt41 { get; } = new ModelIdsResponses(Gpt41Value); - public static ModelIdsResponses Gpt41Mini { get; } = new ModelIdsResponses(Gpt41MiniValue); + internal static ModelIdsResponses Gpt41Mini { get; } = new ModelIdsResponses(Gpt41MiniValue); - public static ModelIdsResponses Gpt41Nano { get; } = new ModelIdsResponses(Gpt41NanoValue); + internal static ModelIdsResponses Gpt41Nano { get; } = new ModelIdsResponses(Gpt41NanoValue); - public static ModelIdsResponses Gpt4120250414 { get; } = new ModelIdsResponses(Gpt4120250414Value); + internal static ModelIdsResponses Gpt4120250414 { get; } = new ModelIdsResponses(Gpt4120250414Value); - public static ModelIdsResponses Gpt41Mini20250414 { get; } = new ModelIdsResponses(Gpt41Mini20250414Value); + internal static ModelIdsResponses Gpt41Mini20250414 { get; } = new ModelIdsResponses(Gpt41Mini20250414Value); - public static ModelIdsResponses Gpt41Nano20250414 { get; } = new ModelIdsResponses(Gpt41Nano20250414Value); + internal 
static ModelIdsResponses Gpt41Nano20250414 { get; } = new ModelIdsResponses(Gpt41Nano20250414Value); - public static ModelIdsResponses O4Mini { get; } = new ModelIdsResponses(O4MiniValue); + internal static ModelIdsResponses O4Mini { get; } = new ModelIdsResponses(O4MiniValue); - public static ModelIdsResponses O4Mini20250416 { get; } = new ModelIdsResponses(O4Mini20250416Value); + internal static ModelIdsResponses O4Mini20250416 { get; } = new ModelIdsResponses(O4Mini20250416Value); - public static ModelIdsResponses O3 { get; } = new ModelIdsResponses(O3Value); + internal static ModelIdsResponses O3 { get; } = new ModelIdsResponses(O3Value); - public static ModelIdsResponses O320250416 { get; } = new ModelIdsResponses(O320250416Value); + internal static ModelIdsResponses O320250416 { get; } = new ModelIdsResponses(O320250416Value); - public static ModelIdsResponses O3Mini { get; } = new ModelIdsResponses(O3MiniValue); + internal static ModelIdsResponses O3Mini { get; } = new ModelIdsResponses(O3MiniValue); - public static ModelIdsResponses O3Mini20250131 { get; } = new ModelIdsResponses(O3Mini20250131Value); + internal static ModelIdsResponses O3Mini20250131 { get; } = new ModelIdsResponses(O3Mini20250131Value); - public static ModelIdsResponses O1 { get; } = new ModelIdsResponses(O1Value); + internal static ModelIdsResponses O1 { get; } = new ModelIdsResponses(O1Value); - public static ModelIdsResponses O120241217 { get; } = new ModelIdsResponses(O120241217Value); + internal static ModelIdsResponses O120241217 { get; } = new ModelIdsResponses(O120241217Value); - public static ModelIdsResponses O1Preview { get; } = new ModelIdsResponses(O1PreviewValue); + internal static ModelIdsResponses O1Preview { get; } = new ModelIdsResponses(O1PreviewValue); - public static ModelIdsResponses O1Preview20240912 { get; } = new ModelIdsResponses(O1Preview20240912Value); + internal static ModelIdsResponses O1Preview20240912 { get; } = new 
ModelIdsResponses(O1Preview20240912Value); - public static ModelIdsResponses O1Mini { get; } = new ModelIdsResponses(O1MiniValue); + internal static ModelIdsResponses O1Mini { get; } = new ModelIdsResponses(O1MiniValue); - public static ModelIdsResponses O1Mini20240912 { get; } = new ModelIdsResponses(O1Mini20240912Value); + internal static ModelIdsResponses O1Mini20240912 { get; } = new ModelIdsResponses(O1Mini20240912Value); - public static ModelIdsResponses Gpt4o { get; } = new ModelIdsResponses(Gpt4oValue); + internal static ModelIdsResponses Gpt4o { get; } = new ModelIdsResponses(Gpt4oValue); - public static ModelIdsResponses Gpt4o20241120 { get; } = new ModelIdsResponses(Gpt4o20241120Value); + internal static ModelIdsResponses Gpt4o20241120 { get; } = new ModelIdsResponses(Gpt4o20241120Value); - public static ModelIdsResponses Gpt4o20240806 { get; } = new ModelIdsResponses(Gpt4o20240806Value); + internal static ModelIdsResponses Gpt4o20240806 { get; } = new ModelIdsResponses(Gpt4o20240806Value); - public static ModelIdsResponses Gpt4o20240513 { get; } = new ModelIdsResponses(Gpt4o20240513Value); + internal static ModelIdsResponses Gpt4o20240513 { get; } = new ModelIdsResponses(Gpt4o20240513Value); - public static ModelIdsResponses Gpt4oAudioPreview { get; } = new ModelIdsResponses(Gpt4oAudioPreviewValue); + internal static ModelIdsResponses Gpt4oAudioPreview { get; } = new ModelIdsResponses(Gpt4oAudioPreviewValue); - public static ModelIdsResponses Gpt4oAudioPreview20241001 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20241001Value); + internal static ModelIdsResponses Gpt4oAudioPreview20241001 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20241001Value); - public static ModelIdsResponses Gpt4oAudioPreview20241217 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20241217Value); + internal static ModelIdsResponses Gpt4oAudioPreview20241217 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20241217Value); - public static ModelIdsResponses 
Gpt4oAudioPreview20250603 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20250603Value); + internal static ModelIdsResponses Gpt4oAudioPreview20250603 { get; } = new ModelIdsResponses(Gpt4oAudioPreview20250603Value); - public static ModelIdsResponses Gpt4oMiniAudioPreview { get; } = new ModelIdsResponses(Gpt4oMiniAudioPreviewValue); + internal static ModelIdsResponses Gpt4oMiniAudioPreview { get; } = new ModelIdsResponses(Gpt4oMiniAudioPreviewValue); - public static ModelIdsResponses Gpt4oMiniAudioPreview20241217 { get; } = new ModelIdsResponses(Gpt4oMiniAudioPreview20241217Value); + internal static ModelIdsResponses Gpt4oMiniAudioPreview20241217 { get; } = new ModelIdsResponses(Gpt4oMiniAudioPreview20241217Value); - public static ModelIdsResponses Gpt4oSearchPreview { get; } = new ModelIdsResponses(Gpt4oSearchPreviewValue); + internal static ModelIdsResponses Gpt4oSearchPreview { get; } = new ModelIdsResponses(Gpt4oSearchPreviewValue); - public static ModelIdsResponses Gpt4oMiniSearchPreview { get; } = new ModelIdsResponses(Gpt4oMiniSearchPreviewValue); + internal static ModelIdsResponses Gpt4oMiniSearchPreview { get; } = new ModelIdsResponses(Gpt4oMiniSearchPreviewValue); - public static ModelIdsResponses Gpt4oSearchPreview20250311 { get; } = new ModelIdsResponses(Gpt4oSearchPreview20250311Value); + internal static ModelIdsResponses Gpt4oSearchPreview20250311 { get; } = new ModelIdsResponses(Gpt4oSearchPreview20250311Value); - public static ModelIdsResponses Gpt4oMiniSearchPreview20250311 { get; } = new ModelIdsResponses(Gpt4oMiniSearchPreview20250311Value); + internal static ModelIdsResponses Gpt4oMiniSearchPreview20250311 { get; } = new ModelIdsResponses(Gpt4oMiniSearchPreview20250311Value); - public static ModelIdsResponses Chatgpt4oLatest { get; } = new ModelIdsResponses(Chatgpt4oLatestValue); + internal static ModelIdsResponses Chatgpt4oLatest { get; } = new ModelIdsResponses(Chatgpt4oLatestValue); - public static ModelIdsResponses CodexMiniLatest { get; 
} = new ModelIdsResponses(CodexMiniLatestValue); + internal static ModelIdsResponses CodexMiniLatest { get; } = new ModelIdsResponses(CodexMiniLatestValue); - public static ModelIdsResponses Gpt4oMini { get; } = new ModelIdsResponses(Gpt4oMiniValue); + internal static ModelIdsResponses Gpt4oMini { get; } = new ModelIdsResponses(Gpt4oMiniValue); - public static ModelIdsResponses Gpt4oMini20240718 { get; } = new ModelIdsResponses(Gpt4oMini20240718Value); + internal static ModelIdsResponses Gpt4oMini20240718 { get; } = new ModelIdsResponses(Gpt4oMini20240718Value); - public static ModelIdsResponses Gpt4Turbo { get; } = new ModelIdsResponses(Gpt4TurboValue); + internal static ModelIdsResponses Gpt4Turbo { get; } = new ModelIdsResponses(Gpt4TurboValue); - public static ModelIdsResponses Gpt4Turbo20240409 { get; } = new ModelIdsResponses(Gpt4Turbo20240409Value); + internal static ModelIdsResponses Gpt4Turbo20240409 { get; } = new ModelIdsResponses(Gpt4Turbo20240409Value); - public static ModelIdsResponses Gpt40125Preview { get; } = new ModelIdsResponses(Gpt40125PreviewValue); + internal static ModelIdsResponses Gpt40125Preview { get; } = new ModelIdsResponses(Gpt40125PreviewValue); - public static ModelIdsResponses Gpt4TurboPreview { get; } = new ModelIdsResponses(Gpt4TurboPreviewValue); + internal static ModelIdsResponses Gpt4TurboPreview { get; } = new ModelIdsResponses(Gpt4TurboPreviewValue); - public static ModelIdsResponses Gpt41106Preview { get; } = new ModelIdsResponses(Gpt41106PreviewValue); + internal static ModelIdsResponses Gpt41106Preview { get; } = new ModelIdsResponses(Gpt41106PreviewValue); - public static ModelIdsResponses Gpt4VisionPreview { get; } = new ModelIdsResponses(Gpt4VisionPreviewValue); + internal static ModelIdsResponses Gpt4VisionPreview { get; } = new ModelIdsResponses(Gpt4VisionPreviewValue); - public static ModelIdsResponses Gpt4 { get; } = new ModelIdsResponses(Gpt4Value); + internal static ModelIdsResponses Gpt4 { get; } = new 
ModelIdsResponses(Gpt4Value); - public static ModelIdsResponses Gpt40314 { get; } = new ModelIdsResponses(Gpt40314Value); + internal static ModelIdsResponses Gpt40314 { get; } = new ModelIdsResponses(Gpt40314Value); - public static ModelIdsResponses Gpt40613 { get; } = new ModelIdsResponses(Gpt40613Value); + internal static ModelIdsResponses Gpt40613 { get; } = new ModelIdsResponses(Gpt40613Value); - public static ModelIdsResponses Gpt432k { get; } = new ModelIdsResponses(Gpt432kValue); + internal static ModelIdsResponses Gpt432k { get; } = new ModelIdsResponses(Gpt432kValue); - public static ModelIdsResponses Gpt432k0314 { get; } = new ModelIdsResponses(Gpt432k0314Value); + internal static ModelIdsResponses Gpt432k0314 { get; } = new ModelIdsResponses(Gpt432k0314Value); - public static ModelIdsResponses Gpt432k0613 { get; } = new ModelIdsResponses(Gpt432k0613Value); + internal static ModelIdsResponses Gpt432k0613 { get; } = new ModelIdsResponses(Gpt432k0613Value); - public static ModelIdsResponses Gpt35Turbo { get; } = new ModelIdsResponses(Gpt35TurboValue); + internal static ModelIdsResponses Gpt35Turbo { get; } = new ModelIdsResponses(Gpt35TurboValue); - public static ModelIdsResponses Gpt35Turbo16k { get; } = new ModelIdsResponses(Gpt35Turbo16kValue); + internal static ModelIdsResponses Gpt35Turbo16k { get; } = new ModelIdsResponses(Gpt35Turbo16kValue); - public static ModelIdsResponses Gpt35Turbo0301 { get; } = new ModelIdsResponses(Gpt35Turbo0301Value); + internal static ModelIdsResponses Gpt35Turbo0301 { get; } = new ModelIdsResponses(Gpt35Turbo0301Value); - public static ModelIdsResponses Gpt35Turbo0613 { get; } = new ModelIdsResponses(Gpt35Turbo0613Value); + internal static ModelIdsResponses Gpt35Turbo0613 { get; } = new ModelIdsResponses(Gpt35Turbo0613Value); - public static ModelIdsResponses Gpt35Turbo1106 { get; } = new ModelIdsResponses(Gpt35Turbo1106Value); + internal static ModelIdsResponses Gpt35Turbo1106 { get; } = new 
ModelIdsResponses(Gpt35Turbo1106Value); - public static ModelIdsResponses Gpt35Turbo0125 { get; } = new ModelIdsResponses(Gpt35Turbo0125Value); + internal static ModelIdsResponses Gpt35Turbo0125 { get; } = new ModelIdsResponses(Gpt35Turbo0125Value); - public static ModelIdsResponses Gpt35Turbo16k0613 { get; } = new ModelIdsResponses(Gpt35Turbo16k0613Value); + internal static ModelIdsResponses Gpt35Turbo16k0613 { get; } = new ModelIdsResponses(Gpt35Turbo16k0613Value); - public static ModelIdsResponses O1Pro { get; } = new ModelIdsResponses(O1ProValue); + internal static ModelIdsResponses O1Pro { get; } = new ModelIdsResponses(O1ProValue); - public static ModelIdsResponses O1Pro20250319 { get; } = new ModelIdsResponses(O1Pro20250319Value); + internal static ModelIdsResponses O1Pro20250319 { get; } = new ModelIdsResponses(O1Pro20250319Value); - public static ModelIdsResponses O3Pro { get; } = new ModelIdsResponses(O3ProValue); + internal static ModelIdsResponses O3Pro { get; } = new ModelIdsResponses(O3ProValue); - public static ModelIdsResponses O3Pro20250610 { get; } = new ModelIdsResponses(O3Pro20250610Value); + internal static ModelIdsResponses O3Pro20250610 { get; } = new ModelIdsResponses(O3Pro20250610Value); - public static ModelIdsResponses ComputerUsePreview { get; } = new ModelIdsResponses(ComputerUsePreviewValue); + internal static ModelIdsResponses ComputerUsePreview { get; } = new ModelIdsResponses(ComputerUsePreviewValue); - public static ModelIdsResponses ComputerUsePreview20250311 { get; } = new ModelIdsResponses(ComputerUsePreview20250311Value); + internal static ModelIdsResponses ComputerUsePreview20250311 { get; } = new ModelIdsResponses(ComputerUsePreview20250311Value); public static bool operator ==(ModelIdsResponses left, ModelIdsResponses right) => left.Equals(right); diff --git a/src/Generated/OpenAIClient.cs b/src/Generated/OpenAIClient.cs index c9f1a8883..f312f774c 100644 --- a/src/Generated/OpenAIClient.cs +++ b/src/Generated/OpenAIClient.cs 
@@ -5,8 +5,6 @@ using System; using System.ClientModel; using System.ClientModel.Primitives; -using System.Diagnostics.CodeAnalysis; -using System.Threading; using OpenAI.Responses; namespace OpenAI @@ -24,11 +22,5 @@ protected OpenAIClient() } public ClientPipeline Pipeline { get; } - - [Experimental("OPENAI001")] - public virtual ResponsesClient GetResponsesClient() - { - return Volatile.Read(ref _cachedResponsesClient) ?? Interlocked.CompareExchange(ref _cachedResponsesClient, new ResponsesClient(Pipeline, _endpoint), null) ?? _cachedResponsesClient; - } } } diff --git a/src/Generated/OpenAIModelFactory.cs b/src/Generated/OpenAIModelFactory.cs index 68a06a4b2..7776be586 100644 --- a/src/Generated/OpenAIModelFactory.cs +++ b/src/Generated/OpenAIModelFactory.cs @@ -1332,6 +1332,37 @@ public static CodeInterpreterToolContainer CodeInterpreterToolContainer(string c return new CodeInterpreterToolContainer(containerId, containerConfiguration, default); } + public static CreateResponseOptions CreateResponseOptions(IDictionary metadata = default, float? temperature = default, float? topP = default, string user = default, ResponseServiceTier? serviceTier = default, string previousResponseId = default, string model = default, ResponseReasoningOptions reasoningOptions = default, bool? isBackgroundModeEnabled = default, int? maxOutputTokenCount = default, string instructions = default, ResponseTextOptions textOptions = default, IEnumerable tools = default, ResponseToolChoice toolChoice = default, ResponseTruncationMode? truncationMode = default, IEnumerable inputItems = default, IEnumerable includedProperties = default, bool? isParallelToolCallsEnabled = default, bool? isStoredOutputEnabled = default, bool? 
isStreamingEnabled = default) + { + metadata ??= new ChangeTrackingDictionary(); + tools ??= new ChangeTrackingList(); + inputItems ??= new ChangeTrackingList(); + includedProperties ??= new ChangeTrackingList(); + + return new CreateResponseOptions( + metadata, + temperature, + topP, + user, + serviceTier, + previousResponseId, + model, + reasoningOptions, + isBackgroundModeEnabled, + maxOutputTokenCount, + instructions, + textOptions, + tools.ToList(), + toolChoice, + truncationMode, + inputItems.ToList(), + includedProperties.ToList(), + isParallelToolCallsEnabled, + isStoredOutputEnabled, + isStreamingEnabled, + default); + } + public static VectorStoreCollectionOptions VectorStoreCollectionOptions(string afterId = default, string beforeId = default, int? pageSizeLimit = default, VectorStoreCollectionOrder? order = default) { return new VectorStoreCollectionOptions(afterId, beforeId, pageSizeLimit, order, additionalBinaryDataProperties: null); diff --git a/tests/Responses/ResponseStoreTests.cs b/tests/Responses/ResponseStoreTests.cs index 6f3cb5240..1cd924575 100644 --- a/tests/Responses/ResponseStoreTests.cs +++ b/tests/Responses/ResponseStoreTests.cs @@ -34,7 +34,7 @@ public async Task GetInputItemsWithPagination() ResponseItem.CreateUserMessageItem("Item 4") }; - ResponseResult response = await client.CreateResponseAsync(new(inputItems)); + ResponseResult response = await client.CreateResponseAsync(new(inputItems, "gpt-4o-mini")); // Paginate through input items with a small page size var options = new ResponseItemCollectionOptions() @@ -77,7 +77,7 @@ public async Task GetInputItemsWithMultiPartPagination() ResponseItem.CreateUserMessageItem("Item 4") }; - ResponseResult response = await client.CreateResponseAsync(new(inputItems)); + ResponseResult response = await client.CreateResponseAsync(new(inputItems, "gpt-4o-mini")); // Paginate through input items with a small page size var options = new ResponseItemCollectionOptions() @@ -122,7 +122,7 @@ public 
async Task GetInputItemsWithAfterIdPagination() ResponseItem.CreateUserMessageItem("C") }; - ResponseResult response = await client.CreateResponseAsync(new(inputItems)); + ResponseResult response = await client.CreateResponseAsync(new(inputItems, "gpt-4o-mini")); string afterId = null; await foreach (ResponseItem first in client.GetResponseInputItemsAsync(response.Id)) @@ -162,7 +162,7 @@ public async Task GetInputItemsWithOrderFiltering() ResponseItem.CreateUserMessageItem("Second") }; - ResponseResult response = await client.CreateResponseAsync(new(inputItems)); + ResponseResult response = await client.CreateResponseAsync(new(inputItems, "gpt-4o-mini")); // Ascending var ascOptions = new ResponseItemCollectionOptions() @@ -209,7 +209,7 @@ public async Task GetInputItemsHandlesLargeLimits() ResponseItem.CreateUserMessageItem("alpha"), ResponseItem.CreateUserMessageItem("beta"), ResponseItem.CreateUserMessageItem("gamma"), - ])); + ], "gpt-4o-mini")); var options = new ResponseItemCollectionOptions() { PageSizeLimit = 100 }; @@ -234,7 +234,7 @@ public async Task GetInputItemsWithMinimalLimits() ResponseItem.CreateUserMessageItem("x"), ResponseItem.CreateUserMessageItem("y"), ResponseItem.CreateUserMessageItem("z"), - ])); + ], "gpt-4o-mini")); var options = new ResponseItemCollectionOptions() { PageSizeLimit = 1 }; @@ -259,7 +259,7 @@ public async Task GetInputItemsWithCancellationToken() ResponseItem.CreateUserMessageItem("ct1"), ResponseItem.CreateUserMessageItem("ct2"), ResponseItem.CreateUserMessageItem("ct3"), - ])); + ], "gpt-4o-mini")); using var cts = new System.Threading.CancellationTokenSource(); @@ -295,7 +295,7 @@ public async Task GetInputItemsWithCombinedOptions() ResponseItem.CreateUserMessageItem("co1"), ResponseItem.CreateUserMessageItem("co2"), ResponseItem.CreateUserMessageItem("co3"), - ])); + ], "gpt-4o-mini")); using var cts = new System.Threading.CancellationTokenSource(TimeSpan.FromSeconds(30)); diff --git a/tests/Responses/ResponsesTests.cs 
b/tests/Responses/ResponsesTests.cs index b4d70d5d5..3542810c2 100644 --- a/tests/Responses/ResponsesTests.cs +++ b/tests/Responses/ResponsesTests.cs @@ -81,7 +81,7 @@ public async Task ComputerToolWithScreenshotRoundTrip() [ ResponseItem.CreateDeveloperMessageItem("Call tools when the user asks to perform computer-related tasks like clicking interface elements."), ResponseItem.CreateUserMessageItem("Click on the Save button.") - ]) + ], "gpt-4o-mini") { Tools = { computerTool }, TruncationMode = ResponseTruncationMode.Auto, @@ -103,8 +103,8 @@ public async Task ComputerToolWithScreenshotRoundTrip() ComputerCallOutput.CreateScreenshotOutput(screenshotBytes, "image/png")); responseOptions.PreviousResponseId = response.Id; - responseOptions.Input.Clear(); - responseOptions.Input.Add(screenshotReply); + responseOptions.InputItems.Clear(); + responseOptions.InputItems.Add(screenshotReply); response = await client.CreateResponseAsync(responseOptions); } else if (computerCall.Action.Kind == ComputerCallActionKind.Click) @@ -123,8 +123,8 @@ public async Task ComputerToolWithScreenshotRoundTrip() || assistantText.Contains("please confirm"))) { responseOptions.PreviousResponseId = response.Id; - responseOptions.Input.Clear(); - responseOptions.Input.Add( + responseOptions.InputItems.Clear(); + responseOptions.InputItems.Add( ResponseItem.CreateAssistantMessageItem("Yes, proceed.")); response = await client.CreateResponseAsync(responseOptions); } @@ -140,7 +140,7 @@ public async Task WebSearchCall() { ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( - new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Searching the internet, what's the weather like in Seattle?")]) + new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Searching the internet, what's the weather like in Seattle?")], "gpt-4o-mini") { Tools = { @@ -167,7 +167,7 @@ public async Task WebSearchCallPreview() { ResponsesClient client = 
GetTestClient(); ResponseResult response = await client.CreateResponseAsync( - new CreateResponseOptions([ResponseItem.CreateUserMessageItem("What was a positive news story from today?")]) + new CreateResponseOptions([ResponseItem.CreateUserMessageItem("What was a positive news story from today?")], "gpt-4o-mini") { Tools = { @@ -196,7 +196,7 @@ public async Task WebSearchCallStreaming() const string message = "Searching the internet, what's the weather like in San Francisco?"; - CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem(message)]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem(message)], "gpt-4o-mini") { Tools = { @@ -262,7 +262,7 @@ public async Task ResponseWithImageGenTool() { ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")], "gpt-4o-mini") { Tools = { @@ -302,7 +302,7 @@ public async Task ImageGenToolStreaming() const string message = "Draw a gorgeous image of a river made of white owl feathers, snaking its way through a serene winter landscape"; - CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem(message)]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem(message)], "gpt-4o-mini") { Tools = { @@ -385,7 +385,7 @@ public async Task ImageGenToolInputMaskWithImageBytes() string imageFilename = "images_dog_and_cat.png"; string imagePath = Path.Combine("Assets", imageFilename); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat 
hugging an otter with an orange scarf")], "gpt-4o-mini") { Tools = { @@ -419,7 +419,7 @@ public async Task ImageGenToolInputMaskWithImageUri() { ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")], "gpt-4o-mini") { Tools = { @@ -470,7 +470,7 @@ public async Task ImageGenToolInputMaskWithFileId() } Validate(file); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")], "gpt-4o-mini") { Tools = { @@ -502,12 +502,12 @@ public async Task ImageGenToolInputMaskWithFileId() [RecordedTest] public async Task StreamingResponses() { - ResponsesClient client = GetTestClient("gpt-4o-mini"); // "computer-use-alpha"); + ResponsesClient client = GetTestClient(); // "computer-use-alpha"); List inputItems = [ResponseItem.CreateUserMessageItem("Hello, world!")]; List deltaTextSegments = []; string finalResponseText = null; - await foreach (StreamingResponseUpdate update in client.CreateResponseStreamingAsync(new(inputItems))) + await foreach (StreamingResponseUpdate update in client.CreateResponseStreamingAsync(new(inputItems, "gpt-4o-mini"))) { Console.WriteLine(ModelReaderWriter.Write(update)); if (update is StreamingResponseOutputTextDeltaUpdate outputTextDeltaUpdate) @@ -530,10 +530,10 @@ public async Task StreamingResponses() [RecordedTest] public async Task StreamingResponsesWithReasoningSummary() { - ResponsesClient client = GetTestClient("o3-mini"); + ResponsesClient client = GetTestClient(); List inputItems = 
[ResponseItem.CreateUserMessageItem("I’m visiting New York for 3 days and love food and art. What’s the best way to plan my trip?")]; - CreateResponseOptions options = new(inputItems) + CreateResponseOptions options = new(inputItems , "o3-mini") { ReasoningOptions = new() { @@ -600,7 +600,7 @@ public async Task ResponsesHelloWorldWithTool(string model) [ ResponseContentPart.CreateInputTextPart("good morning, responses!"), ]), - ]) + ], model) { Tools = { @@ -639,9 +639,9 @@ public async Task ResponsesHelloWorldWithTool(string model) [RecordedTest] public async Task ResponsesWithReasoning() { - ResponsesClient client = GetTestClient("o3-mini"); + ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What's the best way to fold a burrito?")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What's the best way to fold a burrito?")], "o3-mini") { ReasoningOptions = new() { @@ -689,7 +689,7 @@ ResponseContentPart contentPart await foreach (StreamingResponseUpdate update in client.CreateResponseStreamingAsync( - new ([inputItem]) + new ([inputItem], model) { TruncationMode = ResponseTruncationMode.Auto, })) @@ -703,7 +703,7 @@ public async Task CanDeleteResponse() { ResponsesClient client = GetTestClient(); - ResponseResult response = await client.CreateResponseAsync(new([ResponseItem.CreateUserMessageItem("Hello, model!")])); + ResponseResult response = await client.CreateResponseAsync(new([ResponseItem.CreateUserMessageItem("Hello, model!")], "gpt-4o-mini")); async Task RetrieveThatResponseAsync() { @@ -725,7 +725,7 @@ public async Task CanOptOutOfStorage() ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( - new ([ResponseItem.CreateUserMessageItem("Hello, model!")]) + new ([ResponseItem.CreateUserMessageItem("Hello, model!")], "gpt-4o-mini") { IsStoredOutputEnabled = false, }); @@ -740,7 +740,7 @@ public async Task 
ResponseServiceTierWorks() ResponsesClient client = GetTestClient(); MessageResponseItem message = ResponseItem.CreateUserMessageItem("Using a comprehensive evaluation of popular media in the 1970s and 1980s, what were the most common sci-fi themes?"); - CreateResponseOptions options = new([message]) + CreateResponseOptions options = new([message], "gpt-4o-mini") { ServiceTier = ResponseServiceTier.Default, }; @@ -755,7 +755,7 @@ public async Task OutputTextMethod() { ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( - new([ResponseItem.CreateUserMessageItem("Respond with only the word hello.")])); + new([ResponseItem.CreateUserMessageItem("Respond with only the word hello.")], "gpt-4o-mini")); var outputText = response.GetOutputText(); Assert.That(outputText.Length, Is.GreaterThan(0).And.LessThan(7)); Assert.That(outputText.ToLower(), Does.Contain("hello")); @@ -764,7 +764,7 @@ public async Task OutputTextMethod() Assert.That(response.GetOutputText().ToLower(), Does.EndWith("more text!")); response = await client.CreateResponseAsync( - new ([ResponseItem.CreateUserMessageItem("How's the weather?")]) + new ([ResponseItem.CreateUserMessageItem("How's the weather?")], "gpt-4o-mini") { Tools = { @@ -790,7 +790,7 @@ public async Task MessageHistoryWorks() ResponseItem.CreateUserMessageItem("Hello, Assistant, my name is Bob!"), ResponseItem.CreateAssistantMessageItem("Hello, Bob. 
It's a nice, sunny day!"), ResponseItem.CreateUserMessageItem("What's my name and what did you tell me the weather was like?"), - ])); + ], "gpt-4o-mini")); Assert.That(response, Is.Not.Null); } @@ -811,7 +811,7 @@ public async Task ImageInputWorks() ResponseContentPart.CreateInputTextPart("Please describe this picture for me"), ResponseContentPart.CreateInputImagePart(imageBytes, "image/png", ResponseImageDetailLevel.Low), ]), - ])); + ], "gpt-4o-mini")); Console.WriteLine(response.GetOutputText()); Assert.That(response.GetOutputText().ToLowerInvariant(), Does.Contain("dog").Or.Contain("cat").IgnoreCase); @@ -841,7 +841,7 @@ public async Task FileInputFromIdWorks() ResponseContentPart.CreateInputFilePart(newFileToUse.Id), ]); - ResponseResult response = await client.CreateResponseAsync(new([messageItem])); + ResponseResult response = await client.CreateResponseAsync(new([messageItem], "gpt-4o-mini")); Assert.That(response?.GetOutputText().ToLower(), Does.Contain("pizza")); } @@ -861,7 +861,7 @@ public async Task FileInputFromBinaryWorks() ResponseContentPart.CreateInputFilePart(fileBytes, "application/pdf", "test_favorite_foods.pdf"), ]); - ResponseResult response = await client.CreateResponseAsync(new([messageItem])); + ResponseResult response = await client.CreateResponseAsync(new([messageItem], "gpt-4o-mini")); Assert.That(response?.GetOutputText(), Does.Contain("pizza")); } @@ -888,7 +888,7 @@ public async Task AllInstructionMethodsWork(ResponsesTestInstructionMethod instr const string userMessage = "Hello, model!"; messages.Add(ResponseItem.CreateUserMessageItem(userMessage)); - CreateResponseOptions options = new(messages); + CreateResponseOptions options = new(messages, "gpt-4o-mini"); if (instructionMethod == ResponsesTestInstructionMethod.InstructionsProperty) { @@ -944,9 +944,9 @@ public async Task TwoTurnCrossModel() ResponseResult response = await client.CreateResponseAsync(new( - [ResponseItem.CreateUserMessageItem("Hello, Assistant! 
My name is Travis.")])); + [ResponseItem.CreateUserMessageItem("Hello, Assistant! My name is Travis.")], "gpt-4o-mini")); ResponseResult response2 = await client2.CreateResponseAsync( - new ([ResponseItem.CreateUserMessageItem("What's my name?")]) + new ([ResponseItem.CreateUserMessageItem("What's my name?")], "o3-mini") { PreviousResponseId = response.Id, }); @@ -960,7 +960,7 @@ public async Task StructuredOutputs(string modelName) ResponsesClient client = GetTestClient(modelName); ResponseResult response = await client.CreateResponseAsync( - new ([ResponseItem.CreateUserMessageItem("Write a JSON document with a list of five animals")]) + new ([ResponseItem.CreateUserMessageItem("Write a JSON document with a list of five animals")], "gpt-4o-mini") { TextOptions = new ResponseTextOptions() { @@ -1005,7 +1005,7 @@ public async Task FunctionCallWorks() { ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")], "gpt-4o-mini") { Tools = { s_GetWeatherAtLocationTool } }; @@ -1027,7 +1027,7 @@ public async Task FunctionCallWorks() }); ResponseItem functionReply = ResponseItem.CreateFunctionCallOutputItem(functionCall.CallId, "22 celcius and windy"); - CreateResponseOptions turn2Options = new([functionReply]) + CreateResponseOptions turn2Options = new([functionReply], "gpt-4o-mini") { PreviousResponseId = response.Id, Tools = { s_GetWeatherAtLocationTool }, @@ -1051,7 +1051,7 @@ public async Task FunctionCallStreamingWorks() { ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San 
Francisco, CA?")], "gpt-4o-mini") { Tools = { s_GetWeatherAtLocationTool } }; @@ -1101,7 +1101,7 @@ public async Task MaxTokens() ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( - new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Write three haikus about tropical fruit")]) + new CreateResponseOptions([ResponseItem.CreateUserMessageItem("Write three haikus about tropical fruit")], "gpt-4o-mini") { MaxOutputTokenCount = 20, }); @@ -1122,7 +1122,7 @@ public async Task FunctionToolChoiceWorks() ResponseToolChoice toolChoice = ResponseToolChoice.CreateFunctionChoice(s_GetWeatherAtLocationToolName); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("What should I wear for the weather in San Francisco, CA?")], "gpt-4o-mini") { Tools = { s_GetWeatherAtLocationTool }, ToolChoice = toolChoice, @@ -1144,9 +1144,9 @@ ResponseToolChoice toolChoice [RecordedTest] public async Task CanStreamBackgroundResponses() { - ResponsesClient client = GetTestClient("gpt-4.1-mini"); + ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")], "gpt-4o-mini") { IsBackgroundModeEnabled = true, }; @@ -1205,9 +1205,9 @@ public async Task CanStreamBackgroundResponses() [RecordedTest] public async Task CanCancelBackgroundResponses() { - ResponsesClient client = GetTestClient("gpt-4.1-mini"); + ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Hello, model!")], "gpt-4.1-mini") { IsBackgroundModeEnabled = true, }; diff --git 
a/tests/Responses/ResponsesToolTests.cs b/tests/Responses/ResponsesToolTests.cs index db60db17f..f274ba047 100644 --- a/tests/Responses/ResponsesToolTests.cs +++ b/tests/Responses/ResponsesToolTests.cs @@ -34,7 +34,7 @@ public async Task MCPToolWorks() McpToolCallApprovalPolicy approvalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")], "gpt-5") { Tools = { new McpTool(serverLabel, serverUri) @@ -86,7 +86,7 @@ public async Task MCPToolStreamingWorks() McpToolCallApprovalPolicy approvalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")], "gpt-5") { Tools = { new McpTool(serverLabel, serverUri) @@ -200,7 +200,7 @@ public async Task MCPToolNeverRequiresApproval(bool useGlobalPolicy) } }); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")], "gpt-5") { Tools = { new McpTool(serverLabel, serverUri) @@ -241,7 +241,7 @@ public async Task MCPToolAlwaysRequiresApproval(bool useGlobalPolicy) } }); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")], "gpt-5") { Tools = { new McpTool(serverLabel, serverUri) @@ -266,8 +266,8 @@ public async Task MCPToolAlwaysRequiresApproval(bool useGlobalPolicy) // Prepare the response. 
McpToolCallApprovalResponseItem approvalResponseItem = new(approvalRequestItem.Id, true); options.PreviousResponseId = response1.Id; - options.Input.Clear(); - options.Input.Add(approvalResponseItem); + options.InputItems.Clear(); + options.InputItems.Add(approvalResponseItem); ResponseResult response2 = await client.CreateResponseAsync(options); Assert.That(response2.OutputItems, Has.Count.GreaterThan(0)); @@ -282,7 +282,7 @@ public async Task MCPToolWithAllowedTools() McpToolCallApprovalPolicy approvalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")], "gpt-5") { Tools = { new McpTool(serverLabel, serverUri) @@ -323,7 +323,7 @@ public async Task MCPToolWithDisallowedTools() McpToolCallApprovalPolicy approvalPolicy = new McpToolCallApprovalPolicy(GlobalMcpToolCallApprovalPolicy.NeverRequireApproval); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Roll 2d4+1")], "gpt-5") { Tools = { new McpTool(serverLabel, serverUri) @@ -375,7 +375,7 @@ public async Task FileSearch() ResponsesClient client = GetTestClient(); ResponseResult response = await client.CreateResponseAsync( - new([ResponseItem.CreateUserMessageItem("Using the file search tool, what's Travis's favorite food?")]) + new([ResponseItem.CreateUserMessageItem("Using the file search tool, what's Travis's favorite food?")], "gpt-4o-mini") { Tools = { @@ -410,7 +410,7 @@ public async Task CodeInterpreterToolWithoutFileIds() ResponsesClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration())); - CreateResponseOptions 
responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code.")]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code.")], "gpt-4o-mini") { Tools = { codeInterpreterTool }, }; @@ -440,7 +440,7 @@ public async Task CodeInterpreterToolWithEmptyFileIds() ResponsesClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new(new AutomaticCodeInterpreterToolContainerConfiguration())); - CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Generate a simple chart using matplotlib. Ensure you emit debug logging and include any resulting log file output.")]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Generate a simple chart using matplotlib. Ensure you emit debug logging and include any resulting log file output.")], "gpt-4o-mini") { Tools = { codeInterpreterTool }, }; @@ -483,7 +483,7 @@ public async Task CodeInterpreterToolWithContainerIdFromContainerApi() { // Create CodeInterpreter tool with the container ID ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new(containerId)); - CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code.")]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code.")], "gpt-4o-mini") { Tools = { codeInterpreterTool }, }; @@ -548,7 +548,7 @@ public async Task CodeInterpreterToolWithUploadedFileIds() // Create CodeInterpreter tool with uploaded file IDs ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration(fileIds))); - CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Analyze the CSV data in the uploaded 
file and create a simple visualization. Also run the Python script that was uploaded.")]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Analyze the CSV data in the uploaded file and create a simple visualization. Also run the Python script that was uploaded.")], "gpt-4o-mini") { Tools = { codeInterpreterTool }, }; @@ -588,7 +588,7 @@ public async Task CodeInterpreterToolStreaming() ResponsesClient client = GetTestClient(); ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(new AutomaticCodeInterpreterToolContainerConfiguration())); - CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code and show me the code step by step.")]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Calculate the factorial of 5 using Python code and show me the code step by step.")], "gpt-4o-mini") { Tools = { codeInterpreterTool }, }; @@ -635,7 +635,7 @@ public async Task CodeInterpreterToolStreamingWithFiles() // Create CodeInterpreter tool with uploaded file IDs ResponseTool codeInterpreterTool = ResponseTool.CreateCodeInterpreterTool(new CodeInterpreterToolContainer(CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration(fileIds))); - CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Load the CSV file and create a simple plot visualization showing the relationship between x and y values.")]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem("Load the CSV file and create a simple plot visualization showing the relationship between x and y values.")], "gpt-4o-mini") { Tools = { codeInterpreterTool }, }; @@ -684,7 +684,7 @@ public async Task ImageGenToolWorks() { ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray 
tabby cat hugging an otter with an orange scarf")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")], "gpt-4o-mini") { Tools = { @@ -723,7 +723,7 @@ public async Task ImageGenToolStreaming() const string message = "Draw a gorgeous image of a river made of white owl feathers, snaking its way through a serene winter landscape"; - CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem(message)]) + CreateResponseOptions responseOptions = new([ResponseItem.CreateUserMessageItem(message)], "gpt-4o-mini") { Tools = { @@ -807,7 +807,7 @@ public async Task ImageGenToolInputMaskWithImageBytes() string imageFilename = "images_dog_and_cat.png"; string imagePath = Path.Combine("Assets", imageFilename); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")], "gpt-4o-mini") { Tools = { @@ -840,7 +840,7 @@ public async Task ImageGenToolInputMaskWithImageUri() { ResponsesClient client = GetTestClient(); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")], "gpt-4o-mini") { Tools = { @@ -890,7 +890,7 @@ public async Task ImageGenToolInputMaskWithFileId() } Validate(file); - CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange scarf")]) + CreateResponseOptions options = new([ResponseItem.CreateUserMessageItem("Generate an image of gray tabby cat hugging an otter with an orange 
scarf")], "gpt-4o-mini") { Tools = { diff --git a/tests/Utility/TestHelpers.cs b/tests/Utility/TestHelpers.cs index 242366a5d..54423a097 100644 --- a/tests/Utility/TestHelpers.cs +++ b/tests/Utility/TestHelpers.cs @@ -121,7 +121,7 @@ public static T GetTestClient( TestScenario.Realtime => new RealtimeClient(credential, options), #pragma warning restore #pragma warning disable OPENAI003 - TestScenario.Responses => new ResponsesClient(model, credential, options), + TestScenario.Responses => new ResponsesClient(credential, options), #pragma warning restore _ => throw new NotImplementedException(), }; diff --git a/tspCodeModel.json b/tspCodeModel.json index 50d2fa449..feea67a46 100644 --- a/tspCodeModel.json +++ b/tspCodeModel.json @@ -108486,13 +108486,471 @@ { "$id": "8220", "kind": "model", + "name": "DotNetCreateResponse", + "namespace": "OpenAI", + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse", + "usage": "Input,Json", + "decorators": [ + { + "name": "TypeSpec.HttpClient.CSharp.@dynamicModel", + "arguments": {} + } + ], + "properties": [ + { + "$id": "8221", + "kind": "property", + "name": "metadata", + "serializedName": "metadata", + "doc": "Set of 16 key-value pairs that can be attached to an object. This can be\nuseful for storing additional information about the object in a structured\nformat, and querying for objects via API or the dashboard.\n\nKeys are strings with a maximum length of 64 characters. Values are strings\nwith a maximum length of 512 characters.", + "type": { + "$ref": "2445" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.metadata", + "serializationOptions": { + "json": { + "name": "metadata" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8222", + "kind": "property", + "name": "temperature", + "serializedName": "temperature", + "doc": "What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.\nWe generally recommend altering this or `top_p` but not both.", + "type": { + "$ref": "2977" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.temperature", + "serializationOptions": { + "json": { + "name": "temperature" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8223", + "kind": "property", + "name": "top_p", + "serializedName": "top_p", + "doc": "An alternative to sampling with temperature, called nucleus sampling,\nwhere the model considers the results of the tokens with top_p probability\nmass. So 0.1 means only the tokens comprising the top 10% probability mass\nare considered.\n\nWe generally recommend altering this or `temperature` but not both.", + "type": { + "$ref": "2980" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.top_p", + "serializationOptions": { + "json": { + "name": "top_p" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8224", + "kind": "property", + "name": "user", + "serializedName": "user", + "doc": "A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. 
[Learn more](/docs/guides/safety-best-practices#end-user-ids).", + "type": { + "$id": "8225", + "kind": "string", + "name": "string", + "crossLanguageDefinitionId": "TypeSpec.string", + "decorators": [] + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.user", + "serializationOptions": { + "json": { + "name": "user" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8226", + "kind": "property", + "name": "service_tier", + "serializedName": "service_tier", + "type": { + "$ref": "497" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.service_tier", + "serializationOptions": { + "json": { + "name": "service_tier" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8227", + "kind": "property", + "name": "previous_response_id", + "serializedName": "previous_response_id", + "doc": "The unique ID of the previous response to the model. Use this to\ncreate multi-turn conversations. Learn more about\n[conversation state](/docs/guides/conversation-state).", + "type": { + "$ref": "4635" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.previous_response_id", + "serializationOptions": { + "json": { + "name": "previous_response_id" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8228", + "kind": "property", + "name": "model", + "serializedName": "model", + "doc": "Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI\noffers a wide range of models with different capabilities, performance\ncharacteristics, and price points. 
Refer to the [model guide](/docs/models)\nto browse and compare available models.", + "type": { + "$ref": "503" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.model", + "serializationOptions": { + "json": { + "name": "model" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8229", + "kind": "property", + "name": "reasoning", + "serializedName": "reasoning", + "type": { + "$ref": "4639" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.reasoning", + "serializationOptions": { + "json": { + "name": "reasoning" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8230", + "kind": "property", + "name": "background", + "serializedName": "background", + "doc": "Whether to run the model response in the background.\n[Learn more](/docs/guides/background).", + "type": { + "$ref": "4648" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.background", + "serializationOptions": { + "json": { + "name": "background" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8231", + "kind": "property", + "name": "max_output_tokens", + "serializedName": "max_output_tokens", + "doc": "An upper bound for the number of tokens that can be generated for a response, including visible output tokens and [reasoning tokens](/docs/guides/reasoning).", + "type": { + "$ref": "4651" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.max_output_tokens", + "serializationOptions": { + "json": { + "name": "max_output_tokens" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8232", + "kind": "property", + "name": 
"instructions", + "serializedName": "instructions", + "doc": "Inserts a system (or developer) message as the first item in the model's context.\n\nWhen using along with `previous_response_id`, the instructions from a previous\nresponse will not be carried over to the next response. This makes it simple\nto swap out system (or developer) messages in new responses.", + "type": { + "$ref": "4654" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.instructions", + "serializationOptions": { + "json": { + "name": "instructions" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8233", + "kind": "property", + "name": "text", + "serializedName": "text", + "doc": "Configuration options for a text response from the model. Can be plain\ntext or structured JSON data. Learn more:\n- [Text inputs and outputs](/docs/guides/text)\n- [Structured Outputs](/docs/guides/structured-outputs)", + "type": { + "$ref": "4657" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.text", + "serializationOptions": { + "json": { + "name": "text" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8234", + "kind": "property", + "name": "tools", + "serializedName": "tools", + "doc": "An array of tools the model may call while generating a response. You\ncan specify which tool to use by setting the `tool_choice` parameter.\n\nThe two categories of tools you can provide the model are:\n\n- **Built-in tools**: Tools that are provided by OpenAI that extend the\n model's capabilities, like [web search](/docs/guides/tools-web-search)\n or [file search](/docs/guides/tools-file-search). Learn more about\n [built-in tools](/docs/guides/tools).\n- **Function calls (custom tools)**: Functions that are defined by you,\n enabling the model to call your own code. 
Learn more about\n [function calling](/docs/guides/function-calling).", + "type": { + "$ref": "4346" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.tools", + "serializationOptions": { + "json": { + "name": "tools" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8235", + "kind": "property", + "name": "tool_choice", + "serializedName": "tool_choice", + "doc": "How the model should select which tool (or tools) to use when generating\na response. See the `tools` parameter to see how to specify which tools\nthe model can call.", + "type": { + "$ref": "4661" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.tool_choice", + "serializationOptions": { + "json": { + "name": "tool_choice" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8236", + "kind": "property", + "name": "truncation", + "serializedName": "truncation", + "doc": "The truncation strategy to use for the model response.\n- `auto`: If the context of this response and previous ones exceeds\n the model's context window size, the model will truncate the\n response to fit the context window by dropping input items in the\n middle of the conversation.\n- `disabled` (default): If a model response will exceed the context window\n size for a model, the request will fail with a 400 error.", + "type": { + "$ref": "4697" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.truncation", + "serializationOptions": { + "json": { + "name": "truncation" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8237", + "kind": "property", + "name": "input", + "serializedName": "input", + "doc": "Text, image, or file inputs to the model, used to generate a 
response.\n\nLearn more:\n- [Text inputs and outputs](/docs/guides/text)\n- [Image inputs](/docs/guides/images)\n- [File inputs](/docs/guides/pdf-files)\n- [Conversation state](/docs/guides/conversation-state)\n- [Function calling](/docs/guides/function-calling)", + "type": { + "$ref": "4699" + }, + "optional": false, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.input", + "serializationOptions": { + "json": { + "name": "input" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8238", + "kind": "property", + "name": "include", + "serializedName": "include", + "doc": "Specify additional output data to include in the model response. Currently\nsupported values are:\n- `file_search_call.results`: Include the search results of\n the file search tool call.\n- `message.input_image.image_url`: Include image urls from the input message.\n- `computer_call_output.output.image_url`: Include image urls from the computer call output.\n- `reasoning.encrypted_content`: Includes an encrypted version of reasoning\n tokens in reasoning item outputs. 
This enables reasoning items to be used in\n multi-turn conversations when using the Responses API statelessly (like\n when the `store` parameter is set to `false`, or when an organization is\n enrolled in the zero data retention program).\n- `code_interpreter_call.outputs`: Includes the outputs of python code execution\n in code interpreter tool call items.", + "type": { + "$ref": "5155" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.include", + "serializationOptions": { + "json": { + "name": "include" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8239", + "kind": "property", + "name": "parallel_tool_calls", + "serializedName": "parallel_tool_calls", + "doc": "Whether to allow the model to run tool calls in parallel.", + "type": { + "$ref": "5158" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.parallel_tool_calls", + "serializationOptions": { + "json": { + "name": "parallel_tool_calls" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8240", + "kind": "property", + "name": "store", + "serializedName": "store", + "doc": "Whether to store the generated model response for later retrieval via\nAPI.", + "type": { + "$ref": "5161" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.store", + "serializationOptions": { + "json": { + "name": "store" + } + }, + "isHttpMetadata": false + }, + { + "$id": "8241", + "kind": "property", + "name": "stream", + "serializedName": "stream", + "doc": "If set to true, the model response data will be streamed to the client\nas it is generated using [server-sent 
events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format).\nSee the [Streaming section below](/docs/api-reference/responses-streaming)\nfor more information.", + "type": { + "$ref": "5164" + }, + "optional": true, + "readOnly": false, + "discriminator": false, + "flatten": false, + "decorators": [], + "crossLanguageDefinitionId": "OpenAI.DotNetCreateResponse.stream", + "serializationOptions": { + "json": { + "name": "stream" + } + }, + "isHttpMetadata": false + } + ] + }, + { + "$id": "8242", + "kind": "model", "name": "DotNetCombinedChunkingStrategyParam", "namespace": "OpenAI", "crossLanguageDefinitionId": "OpenAI.DotNetCombinedChunkingStrategyParam", "usage": "Input,Output", "decorators": [], "discriminatorProperty": { - "$id": "8221", + "$id": "8243", "kind": "property", "name": "type", "type": { @@ -108509,12 +108967,12 @@ }, "properties": [ { - "$ref": "8221" + "$ref": "8243" } ], "discriminatedSubtypes": { "auto": { - "$id": "8222", + "$id": "8244", "kind": "model", "name": "DotNetCombinedAutoChunkingStrategyParam", "namespace": "OpenAI", @@ -108524,16 +108982,16 @@ "discriminatorValue": "auto", "decorators": [], "baseModel": { - "$ref": "8220" + "$ref": "8242" }, "properties": [ { - "$id": "8223", + "$id": "8245", "kind": "property", "name": "type", "doc": "Always `auto`.", "type": { - "$id": "8224", + "$id": "8246", "kind": "enumvalue", "name": "auto", "value": "auto", @@ -108541,14 +108999,14 @@ "$ref": "1391" }, "enumType": { - "$id": "8225", + "$id": "8247", "kind": "enum", "decorators": [], "name": "DotNetCombinedChunkingStrategyParamType", "isGeneratedName": true, "namespace": "OpenAI", "valueType": { - "$id": "8226", + "$id": "8248", "kind": "string", "decorators": [], "doc": "A sequence of textual characters.", @@ -108557,42 +109015,42 @@ }, "values": [ { - "$id": "8227", + "$id": "8249", "kind": "enumvalue", "decorators": [], "name": "auto", "value": "auto", "valueType": { - "$ref": 
"8226" + "$ref": "8248" }, "enumType": { - "$ref": "8225" + "$ref": "8247" } }, { - "$id": "8228", + "$id": "8250", "kind": "enumvalue", "decorators": [], "name": "static", "value": "static", "valueType": { - "$ref": "8226" + "$ref": "8248" }, "enumType": { - "$ref": "8225" + "$ref": "8247" } }, { - "$id": "8229", + "$id": "8251", "kind": "enumvalue", "decorators": [], "name": "other", "value": "other", "valueType": { - "$ref": "8226" + "$ref": "8248" }, "enumType": { - "$ref": "8225" + "$ref": "8247" } } ], @@ -108619,7 +109077,7 @@ ] }, "static": { - "$id": "8230", + "$id": "8252", "kind": "model", "name": "DotNetCombinedStaticChunkingStrategyParam", "namespace": "OpenAI", @@ -108629,16 +109087,16 @@ "discriminatorValue": "static", "decorators": [], "baseModel": { - "$ref": "8220" + "$ref": "8242" }, "properties": [ { - "$id": "8231", + "$id": "8253", "kind": "property", "name": "type", "doc": "Always `static`.", "type": { - "$id": "8232", + "$id": "8254", "kind": "enumvalue", "name": "static", "value": "static", @@ -108646,7 +109104,7 @@ "$ref": "1391" }, "enumType": { - "$ref": "8225" + "$ref": "8247" }, "decorators": [] }, @@ -108660,7 +109118,7 @@ "isHttpMetadata": false }, { - "$id": "8233", + "$id": "8255", "kind": "property", "name": "static", "type": { @@ -108678,7 +109136,7 @@ ] }, "other": { - "$id": "8234", + "$id": "8256", "kind": "model", "name": "DotNetCombinedOtherChunkingStrategyParam", "namespace": "OpenAI", @@ -108688,16 +109146,16 @@ "discriminatorValue": "other", "decorators": [], "baseModel": { - "$ref": "8220" + "$ref": "8242" }, "properties": [ { - "$id": "8235", + "$id": "8257", "kind": "property", "name": "type", "doc": "Always `other`.", "type": { - "$id": "8236", + "$id": "8258", "kind": "enumvalue", "name": "other", "value": "other", @@ -108705,7 +109163,7 @@ "$ref": "1391" }, "enumType": { - "$ref": "8225" + "$ref": "8247" }, "decorators": [] }, @@ -108723,16 +109181,16 @@ } }, { - "$ref": "8222" + "$ref": "8244" }, { - "$ref": "8230" 
+ "$ref": "8252" }, { - "$ref": "8234" + "$ref": "8256" }, { - "$id": "8237", + "$id": "8259", "kind": "model", "name": "VectorStoreCollectionOptions", "namespace": "OpenAI", @@ -108742,12 +109200,12 @@ "decorators": [], "properties": [ { - "$id": "8238", + "$id": "8260", "kind": "property", "name": "afterId", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8239", + "$id": "8261", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -108763,12 +109221,12 @@ "isHttpMetadata": true }, { - "$id": "8240", + "$id": "8262", "kind": "property", "name": "beforeId", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, starting with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "8241", + "$id": "8263", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -108784,12 +109242,12 @@ "isHttpMetadata": true }, { - "$id": "8242", + "$id": "8264", "kind": "property", "name": "pageSizeLimit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8243", + "$id": "8265", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -108805,7 +109263,7 @@ "isHttpMetadata": true }, { - "$id": "8244", + "$id": "8266", "kind": "property", "name": "order", "doc": "Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending order and`desc`\nfor descending order.", @@ -108824,7 +109282,7 @@ ] }, { - "$id": "8245", + "$id": "8267", "kind": "model", "name": "VectorStoreFileCollectionOptions", "namespace": "OpenAI", @@ -108834,12 +109292,12 @@ "decorators": [], "properties": [ { - "$id": "8246", + "$id": "8268", "kind": "property", "name": "afterId", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8247", + "$id": "8269", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -108855,12 +109313,12 @@ "isHttpMetadata": true }, { - "$id": "8248", + "$id": "8270", "kind": "property", "name": "beforeId", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, starting with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "8249", + "$id": "8271", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -108876,12 +109334,12 @@ "isHttpMetadata": true }, { - "$id": "8250", + "$id": "8272", "kind": "property", "name": "pageSizeLimit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8251", + "$id": "8273", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -108897,7 +109355,7 @@ "isHttpMetadata": true }, { - "$id": "8252", + "$id": "8274", "kind": "property", "name": "order", "doc": "Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending order and`desc`\nfor descending order.", @@ -108914,7 +109372,7 @@ "isHttpMetadata": true }, { - "$id": "8253", + "$id": "8275", "kind": "property", "name": "filter", "type": { @@ -108932,7 +109390,7 @@ ] }, { - "$id": "8254", + "$id": "8276", "kind": "model", "name": "VideoCollectionOptions", "namespace": "OpenAI", @@ -108942,12 +109400,12 @@ "decorators": [], "properties": [ { - "$id": "8255", + "$id": "8277", "kind": "property", "name": "afterId", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8256", + "$id": "8278", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -108963,12 +109421,12 @@ "isHttpMetadata": true }, { - "$id": "8257", + "$id": "8279", "kind": "property", "name": "pageSizeLimit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8258", + "$id": "8280", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -108984,7 +109442,7 @@ "isHttpMetadata": true }, { - "$id": "8259", + "$id": "8281", "kind": "property", "name": "order", "doc": "Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending order and`desc`\nfor descending order.", @@ -109005,20 +109463,20 @@ ], "clients": [ { - "$id": "8260", + "$id": "8282", "kind": "client", "name": "OpenAIClient", "namespace": "OpenAI", "methods": [], "parameters": [ { - "$id": "8261", + "$id": "8283", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8262", + "$id": "8284", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -109029,7 +109487,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8263", + "$id": "8285", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -109048,27 +109506,27 @@ "apiVersions": [], "children": [ { - "$id": "8264", + "$id": "8286", "kind": "client", "name": "Assistants", "namespace": "OpenAI", "methods": [ { - "$id": "8265", + "$id": "8287", "kind": "paging", "name": "GetAssistants", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of assistants.", "operation": { - "$id": "8266", + "$id": "8288", "name": "GetAssistants", "resourceName": "Assistants", "summary": "Returns a list of assistants.", "accessibility": "public", "parameters": [ { - "$id": "8267", + "$id": "8289", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -109084,7 +109542,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.listAssistants.accept" }, { - "$id": "8268", + "$id": "8290", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -109100,13 +109558,13 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.listAssistants.openAIBeta" }, { - "$id": "8269", + "$id": "8291", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8270", + "$id": "8292", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -109121,7 +109579,7 @@ "readOnly": false }, { - "$id": "8271", + "$id": "8293", "kind": "query", "name": "order", "serializedName": "order", @@ -109138,13 +109596,13 @@ "readOnly": false }, { - "$id": "8272", + "$id": "8294", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8273", + "$id": "8295", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -109159,13 +109617,13 @@ "readOnly": false }, { - "$id": "8274", + "$id": "8296", "kind": "query", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "8275", + "$id": "8297", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -109206,7 +109664,7 @@ }, "parameters": [ { - "$id": "8276", + "$id": "8298", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -109223,7 +109681,7 @@ "decorators": [] }, { - "$id": "8277", + "$id": "8299", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -109240,13 +109698,13 @@ "decorators": [] }, { - "$id": "8278", + "$id": "8300", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8279", + "$id": "8301", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -109262,7 +109720,7 @@ "decorators": [] }, { - "$id": "8280", + "$id": "8302", "kind": "method", "name": "order", "serializedName": "order", @@ -109280,13 +109738,13 @@ "decorators": [] }, { - "$id": "8281", + "$id": "8303", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8282", + "$id": "8304", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -109302,13 +109760,13 @@ "decorators": [] }, { - "$id": "8283", + "$id": "8305", "kind": "method", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "8284", + "$id": "8306", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -109342,7 +109800,7 @@ ], "continuationToken": { "parameter": { - "$ref": "8272" + "$ref": "8294" }, "responseSegments": [ "last_id" @@ -109352,21 +109810,21 @@ } }, { - "$id": "8285", + "$id": "8307", "kind": "basic", "name": "createAssistant", "accessibility": "public", "apiVersions": [], "summary": "Create an assistant with a model and instructions.", "operation": { - "$id": "8286", + "$id": "8308", "name": "createAssistant", "resourceName": "Assistants", "summary": "Create an assistant with a model and instructions.", "accessibility": "public", "parameters": [ { - "$id": "8287", + "$id": "8309", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -109382,7 +109840,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.createAssistant.accept" }, { - "$id": "8288", + "$id": "8310", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -109398,7 +109856,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.createAssistant.openAIBeta" }, { - "$id": "8289", + "$id": "8311", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -109415,7 +109873,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.createAssistant.contentType" }, { - "$id": "8290", + "$id": "8312", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -109463,7 +109921,7 @@ }, "parameters": [ { - "$id": "8291", + "$id": "8313", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -109480,7 +109938,7 @@ "decorators": [] }, { - "$id": "8292", + "$id": "8314", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ 
-109497,7 +109955,7 @@ "decorators": [] }, { - "$id": "8293", + "$id": "8315", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -109514,7 +109972,7 @@ "decorators": [] }, { - "$id": "8294", + "$id": "8316", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -109543,21 +110001,21 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.createAssistant" }, { - "$id": "8295", + "$id": "8317", "kind": "basic", "name": "getAssistant", "accessibility": "public", "apiVersions": [], "summary": "Retrieves an assistant.", "operation": { - "$id": "8296", + "$id": "8318", "name": "getAssistant", "resourceName": "Assistants", "summary": "Retrieves an assistant.", "accessibility": "public", "parameters": [ { - "$id": "8297", + "$id": "8319", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -109573,7 +110031,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.getAssistant.accept" }, { - "$id": "8298", + "$id": "8320", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -109589,13 +110047,13 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.getAssistant.openAIBeta" }, { - "$id": "8299", + "$id": "8321", "kind": "path", "name": "assistant_id", "serializedName": "assistant_id", "doc": "The ID of the assistant to retrieve.", "type": { - "$id": "8300", + "$id": "8322", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -109639,7 +110097,7 @@ }, "parameters": [ { - "$id": "8301", + "$id": "8323", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -109656,7 +110114,7 @@ "decorators": [] }, { - "$id": "8302", + "$id": "8324", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -109673,13 +110131,13 @@ "decorators": [] }, { - "$id": "8303", + "$id": "8325", "kind": "method", "name": "assistant_id", "serializedName": "assistant_id", "doc": "The ID of the assistant to retrieve.", "type": { - "$id": "8304", + "$id": "8326", 
"kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -109706,21 +110164,21 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.getAssistant" }, { - "$id": "8305", + "$id": "8327", "kind": "basic", "name": "modifyAssistant", "accessibility": "public", "apiVersions": [], "summary": "Modifies an assistant.", "operation": { - "$id": "8306", + "$id": "8328", "name": "modifyAssistant", "resourceName": "Assistants", "summary": "Modifies an assistant.", "accessibility": "public", "parameters": [ { - "$id": "8307", + "$id": "8329", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -109736,7 +110194,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.modifyAssistant.accept" }, { - "$id": "8308", + "$id": "8330", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -109752,13 +110210,13 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.modifyAssistant.openAIBeta" }, { - "$id": "8309", + "$id": "8331", "kind": "path", "name": "assistant_id", "serializedName": "assistant_id", "doc": "The ID of the assistant to modify.", "type": { - "$id": "8310", + "$id": "8332", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -109776,7 +110234,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.modifyAssistant.assistant_id" }, { - "$id": "8311", + "$id": "8333", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -109793,7 +110251,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.modifyAssistant.contentType" }, { - "$id": "8312", + "$id": "8334", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -109841,7 +110299,7 @@ }, "parameters": [ { - "$id": "8313", + "$id": "8335", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -109858,7 +110316,7 @@ "decorators": [] }, { - "$id": "8314", + "$id": "8336", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -109875,13 +110333,13 @@ "decorators": 
[] }, { - "$id": "8315", + "$id": "8337", "kind": "method", "name": "assistant_id", "serializedName": "assistant_id", "doc": "The ID of the assistant to modify.", "type": { - "$id": "8316", + "$id": "8338", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -109897,7 +110355,7 @@ "decorators": [] }, { - "$id": "8317", + "$id": "8339", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -109914,7 +110372,7 @@ "decorators": [] }, { - "$id": "8318", + "$id": "8340", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -109943,21 +110401,21 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.modifyAssistant" }, { - "$id": "8319", + "$id": "8341", "kind": "basic", "name": "deleteAssistant", "accessibility": "public", "apiVersions": [], "summary": "Delete an assistant.", "operation": { - "$id": "8320", + "$id": "8342", "name": "deleteAssistant", "resourceName": "Assistants", "summary": "Delete an assistant.", "accessibility": "public", "parameters": [ { - "$id": "8321", + "$id": "8343", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -109973,7 +110431,7 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.deleteAssistant.accept" }, { - "$id": "8322", + "$id": "8344", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -109989,13 +110447,13 @@ "crossLanguageDefinitionId": "OpenAI.Assistants.deleteAssistant.openAIBeta" }, { - "$id": "8323", + "$id": "8345", "kind": "path", "name": "assistant_id", "serializedName": "assistant_id", "doc": "The ID of the assistant to delete.", "type": { - "$id": "8324", + "$id": "8346", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -110039,7 +110497,7 @@ }, "parameters": [ { - "$id": "8325", + "$id": "8347", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -110056,7 +110514,7 @@ "decorators": [] }, { - "$id": "8326", + "$id": "8348", "kind": "method", "name": 
"openAIBeta", "serializedName": "OpenAI-Beta", @@ -110073,13 +110531,13 @@ "decorators": [] }, { - "$id": "8327", + "$id": "8349", "kind": "method", "name": "assistant_id", "serializedName": "assistant_id", "doc": "The ID of the assistant to delete.", "type": { - "$id": "8328", + "$id": "8350", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -110108,13 +110566,13 @@ ], "parameters": [ { - "$id": "8329", + "$id": "8351", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8330", + "$id": "8352", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -110125,7 +110583,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8331", + "$id": "8353", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -110143,31 +110601,31 @@ "crossLanguageDefinitionId": "OpenAI.Assistants", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8332", + "$id": "8354", "kind": "client", "name": "Audio", "namespace": "OpenAI", "methods": [ { - "$id": "8333", + "$id": "8355", "kind": "basic", "name": "GenerateSpeech", "accessibility": "public", "apiVersions": [], "summary": "Generates audio from the input text.", "operation": { - "$id": "8334", + "$id": "8356", "name": "GenerateSpeech", "resourceName": "Audio", "summary": "Generates audio from the input text.", "accessibility": "public", "parameters": [ { - "$id": "8335", + "$id": "8357", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -110183,7 +110641,7 @@ "crossLanguageDefinitionId": "OpenAI.Audio.createSpeech.accept" }, { - "$id": "8336", + "$id": "8358", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -110200,7 +110658,7 @@ "crossLanguageDefinitionId": "OpenAI.Audio.createSpeech.contentType" }, { - "$id": "8337", + "$id": "8359", "kind": "body", "name": "requestBody", "serializedName": 
"requestBody", @@ -110225,7 +110683,7 @@ 200 ], "bodyType": { - "$id": "8338", + "$id": "8360", "kind": "bytes", "name": "bytes", "crossLanguageDefinitionId": "TypeSpec.bytes", @@ -110237,7 +110695,7 @@ "nameInResponse": "Transfer-Encoding", "doc": "chunked", "type": { - "$id": "8339", + "$id": "8361", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -110272,7 +110730,7 @@ }, "parameters": [ { - "$id": "8340", + "$id": "8362", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -110289,7 +110747,7 @@ "decorators": [] }, { - "$id": "8341", + "$id": "8363", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -110306,7 +110764,7 @@ "decorators": [] }, { - "$id": "8342", + "$id": "8364", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -110326,7 +110784,7 @@ ], "response": { "type": { - "$ref": "8338" + "$ref": "8360" } }, "isOverride": false, @@ -110335,21 +110793,21 @@ "crossLanguageDefinitionId": "OpenAI.Audio.createSpeech" }, { - "$id": "8343", + "$id": "8365", "kind": "basic", "name": "TranscribeAudio", "accessibility": "public", "apiVersions": [], "summary": "Transcribes audio into the input language.", "operation": { - "$id": "8344", + "$id": "8366", "name": "TranscribeAudio", "resourceName": "Audio", "summary": "Transcribes audio into the input language.", "accessibility": "public", "parameters": [ { - "$id": "8345", + "$id": "8367", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -110365,7 +110823,7 @@ "crossLanguageDefinitionId": "OpenAI.Audio.createTranscription.accept" }, { - "$id": "8346", + "$id": "8368", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -110381,7 +110839,7 @@ "crossLanguageDefinitionId": "OpenAI.Audio.createTranscription.contentType" }, { - "$id": "8347", + "$id": "8369", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -110406,12 +110864,12 @@ 200 ], 
"bodyType": { - "$id": "8348", + "$id": "8370", "kind": "union", "name": "", "variantTypes": [ { - "$id": "8349", + "$id": "8371", "kind": "union", "name": "", "variantTypes": [ @@ -110463,7 +110921,7 @@ }, "parameters": [ { - "$id": "8350", + "$id": "8372", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -110480,7 +110938,7 @@ "decorators": [] }, { - "$id": "8351", + "$id": "8373", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -110497,7 +110955,7 @@ "decorators": [] }, { - "$id": "8352", + "$id": "8374", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -110516,7 +110974,7 @@ ], "response": { "type": { - "$ref": "8348" + "$ref": "8370" } }, "isOverride": false, @@ -110525,21 +110983,21 @@ "crossLanguageDefinitionId": "OpenAI.Audio.createTranscription" }, { - "$id": "8353", + "$id": "8375", "kind": "basic", "name": "TranslateAudio", "accessibility": "public", "apiVersions": [], "summary": "Translates audio into English..", "operation": { - "$id": "8354", + "$id": "8376", "name": "TranslateAudio", "resourceName": "Audio", "summary": "Translates audio into English..", "accessibility": "public", "parameters": [ { - "$id": "8355", + "$id": "8377", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -110555,7 +111013,7 @@ "crossLanguageDefinitionId": "OpenAI.Audio.createTranslation.accept" }, { - "$id": "8356", + "$id": "8378", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -110571,7 +111029,7 @@ "crossLanguageDefinitionId": "OpenAI.Audio.createTranslation.contentType" }, { - "$id": "8357", + "$id": "8379", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -110596,12 +111054,12 @@ 200 ], "bodyType": { - "$id": "8358", + "$id": "8380", "kind": "union", "name": "", "variantTypes": [ { - "$id": "8359", + "$id": "8381", "kind": "union", "name": "", "variantTypes": [ @@ -110616,7 +111074,7 @@ "decorators": [] }, { - "$id": 
"8360", + "$id": "8382", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -110657,7 +111115,7 @@ }, "parameters": [ { - "$id": "8361", + "$id": "8383", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -110674,7 +111132,7 @@ "decorators": [] }, { - "$id": "8362", + "$id": "8384", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -110691,7 +111149,7 @@ "decorators": [] }, { - "$id": "8363", + "$id": "8385", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -110710,7 +111168,7 @@ ], "response": { "type": { - "$ref": "8358" + "$ref": "8380" } }, "isOverride": false, @@ -110721,13 +111179,13 @@ ], "parameters": [ { - "$id": "8364", + "$id": "8386", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8365", + "$id": "8387", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -110738,7 +111196,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8366", + "$id": "8388", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -110756,31 +111214,31 @@ "crossLanguageDefinitionId": "OpenAI.Audio", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8367", + "$id": "8389", "kind": "client", "name": "Batches", "namespace": "OpenAI", "methods": [ { - "$id": "8368", + "$id": "8390", "kind": "basic", "name": "createBatch", "accessibility": "public", "apiVersions": [], "summary": "Creates and executes a batch from an uploaded file of requests", "operation": { - "$id": "8369", + "$id": "8391", "name": "createBatch", "resourceName": "Batches", "summary": "Creates and executes a batch from an uploaded file of requests", "accessibility": "public", "parameters": [ { - "$id": "8370", + "$id": "8392", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -110796,7 +111254,7 @@ "crossLanguageDefinitionId": 
"OpenAI.Batches.createBatch.accept" }, { - "$id": "8371", + "$id": "8393", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -110813,7 +111271,7 @@ "crossLanguageDefinitionId": "OpenAI.Batches.createBatch.contentType" }, { - "$id": "8372", + "$id": "8394", "kind": "body", "name": "createBatchRequest", "serializedName": "createBatchRequest", @@ -110861,7 +111319,7 @@ }, "parameters": [ { - "$id": "8373", + "$id": "8395", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -110878,13 +111336,13 @@ "decorators": [] }, { - "$id": "8374", + "$id": "8396", "kind": "method", "name": "input_file_id", "serializedName": "input_file_id", "doc": "The ID of an uploaded file that contains requests for the new batch.\n\nSee [upload file](/docs/api-reference/files/create) for how to upload a file.\n\nYour input file must be formatted as a [JSONL file](/docs/api-reference/batch/requestInput),\nand must be uploaded with the purpose `batch`.", "type": { - "$id": "8375", + "$id": "8397", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -110900,7 +111358,7 @@ "decorators": [] }, { - "$id": "8376", + "$id": "8398", "kind": "method", "name": "endpoint", "serializedName": "endpoint", @@ -110918,7 +111376,7 @@ "decorators": [] }, { - "$id": "8377", + "$id": "8399", "kind": "method", "name": "completion_window", "serializedName": "completion_window", @@ -110936,7 +111394,7 @@ "decorators": [] }, { - "$id": "8378", + "$id": "8400", "kind": "method", "name": "metadata", "serializedName": "metadata", @@ -110954,7 +111412,7 @@ "decorators": [] }, { - "$id": "8379", + "$id": "8401", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -110983,21 +111441,21 @@ "crossLanguageDefinitionId": "OpenAI.Batches.createBatch" }, { - "$id": "8380", + "$id": "8402", "kind": "paging", "name": "GetBatches", "accessibility": "public", "apiVersions": [], "summary": "List your organization's batches.", 
"operation": { - "$id": "8381", + "$id": "8403", "name": "GetBatches", "resourceName": "Batches", "summary": "List your organization's batches.", "accessibility": "public", "parameters": [ { - "$id": "8382", + "$id": "8404", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -111013,13 +111471,13 @@ "crossLanguageDefinitionId": "OpenAI.Batches.listBatches.accept" }, { - "$id": "8383", + "$id": "8405", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8384", + "$id": "8406", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111034,13 +111492,13 @@ "readOnly": false }, { - "$id": "8385", + "$id": "8407", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8386", + "$id": "8408", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -111081,7 +111539,7 @@ }, "parameters": [ { - "$id": "8387", + "$id": "8409", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -111098,13 +111556,13 @@ "decorators": [] }, { - "$id": "8388", + "$id": "8410", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8389", + "$id": "8411", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111120,13 +111578,13 @@ "decorators": [] }, { - "$id": "8390", + "$id": "8412", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8391", + "$id": "8413", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -111160,7 +111618,7 @@ ], "continuationToken": { "parameter": { - "$ref": "8383" + "$ref": "8405" }, "responseSegments": [ "last_id" @@ -111170,21 +111628,21 @@ } }, { - "$id": "8392", + "$id": "8414", "kind": "basic", "name": "GetBatch", "accessibility": "public", "apiVersions": [], "summary": "Retrieves a batch.", "operation": { - "$id": "8393", + "$id": "8415", "name": "GetBatch", "resourceName": "Batches", "summary": "Retrieves a batch.", "accessibility": "public", "parameters": [ { - "$id": "8394", + "$id": "8416", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -111200,13 +111658,13 @@ "crossLanguageDefinitionId": "OpenAI.Batches.retrieveBatch.accept" }, { - "$id": "8395", + "$id": "8417", "kind": "path", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the batch to retrieve.", "type": { - "$id": "8396", + "$id": "8418", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111250,7 +111708,7 @@ }, "parameters": [ { - "$id": "8397", + "$id": "8419", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -111267,13 +111725,13 @@ "decorators": [] }, { - "$id": "8398", + "$id": "8420", "kind": "method", "name": "batch_id", 
"serializedName": "batch_id", "doc": "The ID of the batch to retrieve.", "type": { - "$id": "8399", + "$id": "8421", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111300,21 +111758,21 @@ "crossLanguageDefinitionId": "OpenAI.Batches.retrieveBatch" }, { - "$id": "8400", + "$id": "8422", "kind": "basic", "name": "cancelBatch", "accessibility": "public", "apiVersions": [], "summary": "Cancels an in-progress batch.", "operation": { - "$id": "8401", + "$id": "8423", "name": "cancelBatch", "resourceName": "Batches", "summary": "Cancels an in-progress batch.", "accessibility": "public", "parameters": [ { - "$id": "8402", + "$id": "8424", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -111330,13 +111788,13 @@ "crossLanguageDefinitionId": "OpenAI.Batches.cancelBatch.accept" }, { - "$id": "8403", + "$id": "8425", "kind": "path", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the batch to cancel.", "type": { - "$id": "8404", + "$id": "8426", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111380,7 +111838,7 @@ }, "parameters": [ { - "$id": "8405", + "$id": "8427", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -111397,13 +111855,13 @@ "decorators": [] }, { - "$id": "8406", + "$id": "8428", "kind": "method", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the batch to cancel.", "type": { - "$id": "8407", + "$id": "8429", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111432,13 +111890,13 @@ ], "parameters": [ { - "$id": "8408", + "$id": "8430", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8409", + "$id": "8431", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -111449,7 +111907,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8410", + "$id": "8432", "kind": 
"string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -111467,37 +111925,37 @@ "crossLanguageDefinitionId": "OpenAI.Batches", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8411", + "$id": "8433", "kind": "client", "name": "Chat", "namespace": "OpenAI", "methods": [ { - "$id": "8412", + "$id": "8434", "kind": "paging", "name": "listChatCompletions", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of chat completions.", "operation": { - "$id": "8413", + "$id": "8435", "name": "listChatCompletions", "resourceName": "Chat", "summary": "Returns a list of chat completions.", "accessibility": "public", "parameters": [ { - "$id": "8414", + "$id": "8436", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8415", + "$id": "8437", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111512,13 +111970,13 @@ "readOnly": false }, { - "$id": "8416", + "$id": "8438", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8417", + "$id": "8439", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -111533,7 +111991,7 @@ "readOnly": false }, { - "$id": "8418", + "$id": "8440", "kind": "query", "name": "order", "serializedName": "order", @@ -111550,7 +112008,7 @@ "readOnly": false }, { - "$id": "8419", + "$id": "8441", "kind": "query", "name": "metadata", "serializedName": "metadata", @@ -111566,12 +112024,12 @@ "readOnly": false }, { - "$id": "8420", + "$id": "8442", "kind": "query", "name": "model", "serializedName": "model", "type": { - "$id": "8421", + "$id": "8443", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111586,7 +112044,7 @@ "readOnly": false }, { - "$id": "8422", + "$id": "8444", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -111628,13 +112086,13 @@ }, "parameters": [ { - "$id": "8423", + "$id": "8445", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8424", + "$id": "8446", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111650,13 +112108,13 @@ "decorators": [] }, { - "$id": "8425", + "$id": "8447", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8426", + "$id": "8448", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -111672,7 +112130,7 @@ "decorators": [] }, { - "$id": "8427", + "$id": "8449", "kind": "method", "name": "order", "serializedName": "order", @@ -111690,7 +112148,7 @@ "decorators": [] }, { - "$id": "8428", + "$id": "8450", "kind": "method", "name": "metadata", "serializedName": "metadata", @@ -111707,12 +112165,12 @@ "decorators": [] }, { - "$id": "8429", + "$id": "8451", "kind": "method", "name": "model", "serializedName": "model", "type": { - "$id": "8430", + "$id": "8452", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111728,7 +112186,7 @@ "decorators": [] }, { - "$id": "8431", + "$id": "8453", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -111763,7 +112221,7 @@ ], "continuationToken": { "parameter": { - "$ref": "8414" + "$ref": "8436" }, "responseSegments": [ "last_id" @@ -111773,21 +112231,21 @@ } }, { - "$id": "8432", + "$id": "8454", "kind": "basic", "name": "CompleteChat", "accessibility": "public", "apiVersions": [], "summary": "Creates a model response for the given chat conversation.", "operation": { - "$id": "8433", + "$id": "8455", "name": "CompleteChat", "resourceName": "Chat", "summary": "Creates a model response for the given chat conversation.", "accessibility": "public", "parameters": [ { - "$id": "8434", + "$id": "8456", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -111803,7 +112261,7 @@ "crossLanguageDefinitionId": "OpenAI.Chat.createChatCompletion.accept" }, { - "$id": "8435", + "$id": "8457", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -111820,7 +112278,7 @@ "crossLanguageDefinitionId": "OpenAI.Chat.createChatCompletion.contentType" }, { - "$id": "8436", + "$id": "8458", "kind": "body", "name": "requestBody", "serializedName": "requestBody", 
@@ -111845,7 +112303,7 @@ 200 ], "bodyType": { - "$id": "8437", + "$id": "8459", "kind": "union", "name": "", "variantTypes": [ @@ -111889,7 +112347,7 @@ }, "parameters": [ { - "$id": "8438", + "$id": "8460", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -111906,7 +112364,7 @@ "decorators": [] }, { - "$id": "8439", + "$id": "8461", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -111923,7 +112381,7 @@ "decorators": [] }, { - "$id": "8440", + "$id": "8462", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -111943,7 +112401,7 @@ ], "response": { "type": { - "$ref": "8437" + "$ref": "8459" } }, "isOverride": false, @@ -111952,27 +112410,27 @@ "crossLanguageDefinitionId": "OpenAI.Chat.createChatCompletion" }, { - "$id": "8441", + "$id": "8463", "kind": "basic", "name": "getChatCompletion", "accessibility": "public", "apiVersions": [], "summary": "Get a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` will be returned.", "operation": { - "$id": "8442", + "$id": "8464", "name": "getChatCompletion", "resourceName": "Chat", "summary": "Get a stored chat completion. 
Only Chat Completions that have been created with the `store` parameter set to `true` will be returned.", "accessibility": "public", "parameters": [ { - "$id": "8443", + "$id": "8465", "kind": "path", "name": "completion_id", "serializedName": "completion_id", "doc": "The ID of the stored chat completion to retrieve.", "type": { - "$id": "8444", + "$id": "8466", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -111990,7 +112448,7 @@ "crossLanguageDefinitionId": "OpenAI.Chat.getChatCompletion.completion_id" }, { - "$id": "8445", + "$id": "8467", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -112032,13 +112490,13 @@ }, "parameters": [ { - "$id": "8446", + "$id": "8468", "kind": "method", "name": "completion_id", "serializedName": "completion_id", "doc": "The ID of the stored chat completion to retrieve.", "type": { - "$id": "8447", + "$id": "8469", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112054,7 +112512,7 @@ "decorators": [] }, { - "$id": "8448", + "$id": "8470", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -112082,27 +112540,27 @@ "crossLanguageDefinitionId": "OpenAI.Chat.getChatCompletion" }, { - "$id": "8449", + "$id": "8471", "kind": "basic", "name": "updateChatCompletion", "accessibility": "public", "apiVersions": [], "summary": "Modify a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` can be modified. Currently, the only supported modification is to update the `metadata` field.\")", "operation": { - "$id": "8450", + "$id": "8472", "name": "updateChatCompletion", "resourceName": "Chat", "summary": "Modify a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` can be modified. 
Currently, the only supported modification is to update the `metadata` field.\")", "accessibility": "public", "parameters": [ { - "$id": "8451", + "$id": "8473", "kind": "path", "name": "completion_id", "serializedName": "completion_id", "doc": "The ID of the stored chat completion to update.", "type": { - "$id": "8452", + "$id": "8474", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112120,7 +112578,7 @@ "crossLanguageDefinitionId": "OpenAI.Chat.updateChatCompletion.completion_id" }, { - "$id": "8453", + "$id": "8475", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -112137,7 +112595,7 @@ "crossLanguageDefinitionId": "OpenAI.Chat.updateChatCompletion.contentType" }, { - "$id": "8454", + "$id": "8476", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -112153,7 +112611,7 @@ "crossLanguageDefinitionId": "OpenAI.Chat.updateChatCompletion.accept" }, { - "$id": "8455", + "$id": "8477", "kind": "body", "name": "updateChatCompletionRequest", "serializedName": "updateChatCompletionRequest", @@ -112201,13 +112659,13 @@ }, "parameters": [ { - "$id": "8456", + "$id": "8478", "kind": "method", "name": "completion_id", "serializedName": "completion_id", "doc": "The ID of the stored chat completion to update.", "type": { - "$id": "8457", + "$id": "8479", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112223,7 +112681,7 @@ "decorators": [] }, { - "$id": "8458", + "$id": "8480", "kind": "method", "name": "metadata", "serializedName": "metadata", @@ -112241,7 +112699,7 @@ "decorators": [] }, { - "$id": "8459", + "$id": "8481", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -112259,7 +112717,7 @@ "decorators": [] }, { - "$id": "8460", + "$id": "8482", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -112287,27 +112745,27 @@ "crossLanguageDefinitionId": "OpenAI.Chat.updateChatCompletion" }, { - "$id": 
"8461", + "$id": "8483", "kind": "basic", "name": "deleteChatCompletion", "accessibility": "public", "apiVersions": [], "summary": "Delete a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` can be deleted.", "operation": { - "$id": "8462", + "$id": "8484", "name": "deleteChatCompletion", "resourceName": "Chat", "summary": "Delete a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` can be deleted.", "accessibility": "public", "parameters": [ { - "$id": "8463", + "$id": "8485", "kind": "path", "name": "completion_id", "serializedName": "completion_id", "doc": "The ID of the stored chat completion to delete.", "type": { - "$id": "8464", + "$id": "8486", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112325,7 +112783,7 @@ "crossLanguageDefinitionId": "OpenAI.Chat.deleteChatCompletion.completion_id" }, { - "$id": "8465", + "$id": "8487", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -112367,13 +112825,13 @@ }, "parameters": [ { - "$id": "8466", + "$id": "8488", "kind": "method", "name": "completion_id", "serializedName": "completion_id", "doc": "The ID of the stored chat completion to delete.", "type": { - "$id": "8467", + "$id": "8489", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112389,7 +112847,7 @@ "decorators": [] }, { - "$id": "8468", + "$id": "8490", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -112417,27 +112875,27 @@ "crossLanguageDefinitionId": "OpenAI.Chat.deleteChatCompletion" }, { - "$id": "8469", + "$id": "8491", "kind": "paging", "name": "getChatCompletionMessages", "accessibility": "public", "apiVersions": [], "summary": "Get the messages of a stored chat completion. 
Only Chat Completions that have been created with the `store` parameter set to `true` will be returned.", "operation": { - "$id": "8470", + "$id": "8492", "name": "getChatCompletionMessages", "resourceName": "Chat", "summary": "Get the messages of a stored chat completion. Only Chat Completions that have been created with the `store` parameter set to `true` will be returned.", "accessibility": "public", "parameters": [ { - "$id": "8471", + "$id": "8493", "kind": "path", "name": "completion_id", "serializedName": "completion_id", "doc": "The ID of the stored chat completion to retrieve messages for.", "type": { - "$id": "8472", + "$id": "8494", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112455,13 +112913,13 @@ "crossLanguageDefinitionId": "OpenAI.Chat.getChatCompletionMessages.completion_id" }, { - "$id": "8473", + "$id": "8495", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8474", + "$id": "8496", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112476,13 +112934,13 @@ "readOnly": false }, { - "$id": "8475", + "$id": "8497", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8476", + "$id": "8498", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -112497,7 +112955,7 @@ "readOnly": false }, { - "$id": "8477", + "$id": "8499", "kind": "query", "name": "order", "serializedName": "order", @@ -112514,7 +112972,7 @@ "readOnly": false }, { - "$id": "8478", + "$id": "8500", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -112556,13 +113014,13 @@ }, "parameters": [ { - "$id": "8479", + "$id": "8501", "kind": "method", "name": "completion_id", "serializedName": "completion_id", "doc": "The ID of the stored chat completion to retrieve messages for.", "type": { - "$id": "8480", + "$id": "8502", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112578,13 +113036,13 @@ "decorators": [] }, { - "$id": "8481", + "$id": "8503", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8482", + "$id": "8504", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112600,13 +113058,13 @@ "decorators": [] }, { - "$id": "8483", + "$id": "8505", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8484", + "$id": "8506", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -112622,7 +113080,7 @@ "decorators": [] }, { - "$id": "8485", + "$id": "8507", "kind": "method", "name": "order", "serializedName": "order", @@ -112640,7 +113098,7 @@ "decorators": [] }, { - "$id": "8486", + "$id": "8508", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -112675,7 +113133,7 @@ ], "continuationToken": { "parameter": { - "$ref": "8473" + "$ref": "8495" }, "responseSegments": [ "last_id" @@ -112687,13 +113145,13 @@ ], "parameters": [ { - "$id": "8487", + "$id": "8509", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8488", + "$id": "8510", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -112704,7 +113162,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8489", + "$id": "8511", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -112722,35 +113180,35 @@ "crossLanguageDefinitionId": "OpenAI.Chat", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8490", + "$id": "8512", "kind": "client", "name": "Containers", "namespace": "OpenAI", "methods": [ { - "$id": "8491", + "$id": "8513", "kind": "paging", "name": "listContainers", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8492", + "$id": "8514", "name": "listContainers", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8493", + "$id": "8515", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8494", + "$id": "8516", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -112765,7 +113223,7 @@ "readOnly": false }, { - "$id": "8495", + "$id": "8517", "kind": "query", "name": "order", "serializedName": "order", @@ -112782,13 +113240,13 @@ "readOnly": false }, { - "$id": "8496", + "$id": "8518", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8497", + "$id": "8519", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112803,7 +113261,7 @@ "readOnly": false }, { - "$id": "8498", + "$id": "8520", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -112845,13 +113303,13 @@ }, "parameters": [ { - "$id": "8499", + "$id": "8521", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8500", + "$id": "8522", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -112867,7 +113325,7 @@ "decorators": [] }, { - "$id": "8501", + "$id": "8523", "kind": "method", "name": "order", "serializedName": "order", @@ -112885,13 +113343,13 @@ "decorators": [] }, { - "$id": "8502", + "$id": "8524", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8503", + "$id": "8525", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -112907,7 +113365,7 @@ "decorators": [] }, { - "$id": "8504", + "$id": "8526", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -112942,7 +113400,7 @@ ], "continuationToken": { "parameter": { - "$ref": "8496" + "$ref": "8518" }, "responseSegments": [ "last_id" @@ -112952,19 +113410,19 @@ } }, { - "$id": "8505", + "$id": "8527", "kind": "basic", "name": "createContainer", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8506", + "$id": "8528", "name": "createContainer", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8507", + "$id": "8529", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -112981,7 +113439,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.createContainer.contentType" }, { - "$id": "8508", + "$id": "8530", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -112997,7 +113455,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.createContainer.accept" }, { - "$id": "8509", + "$id": "8531", "kind": "body", "name": "body", "serializedName": "body", @@ -113045,7 +113503,7 @@ }, "parameters": [ { - "$id": "8510", + "$id": "8532", "kind": "method", "name": "body", "serializedName": "body", @@ -113062,7 +113520,7 @@ "decorators": [] }, { - "$id": "8511", + "$id": "8533", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -113080,7 +113538,7 @@ "decorators": [] }, { - "$id": "8512", + "$id": "8534", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -113108,24 +113566,24 @@ "crossLanguageDefinitionId": 
"OpenAI.Containers.createContainer" }, { - "$id": "8513", + "$id": "8535", "kind": "basic", "name": "GetContainer", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8514", + "$id": "8536", "name": "GetContainer", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8515", + "$id": "8537", "kind": "path", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8516", + "$id": "8538", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113143,7 +113601,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.retrieveContainer.container_id" }, { - "$id": "8517", + "$id": "8539", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -113185,12 +113643,12 @@ }, "parameters": [ { - "$id": "8518", + "$id": "8540", "kind": "method", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8519", + "$id": "8541", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113206,7 +113664,7 @@ "decorators": [] }, { - "$id": "8520", + "$id": "8542", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -113234,24 +113692,24 @@ "crossLanguageDefinitionId": "OpenAI.Containers.retrieveContainer" }, { - "$id": "8521", + "$id": "8543", "kind": "basic", "name": "deleteContainer", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8522", + "$id": "8544", "name": "deleteContainer", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8523", + "$id": "8545", "kind": "path", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8524", + "$id": "8546", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113269,7 +113727,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.deleteContainer.container_id" }, { - "$id": "8525", + "$id": "8547", "kind": "header", "name": "accept", "serializedName": 
"Accept", @@ -113311,12 +113769,12 @@ }, "parameters": [ { - "$id": "8526", + "$id": "8548", "kind": "method", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8527", + "$id": "8549", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113332,7 +113790,7 @@ "decorators": [] }, { - "$id": "8528", + "$id": "8550", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -113360,24 +113818,24 @@ "crossLanguageDefinitionId": "OpenAI.Containers.deleteContainer" }, { - "$id": "8529", + "$id": "8551", "kind": "basic", "name": "createContainerFile", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8530", + "$id": "8552", "name": "createContainerFile", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8531", + "$id": "8553", "kind": "path", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8532", + "$id": "8554", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113395,7 +113853,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.createContainerFile.container_id" }, { - "$id": "8533", + "$id": "8555", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -113411,7 +113869,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.createContainerFile.contentType" }, { - "$id": "8534", + "$id": "8556", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -113427,7 +113885,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.createContainerFile.accept" }, { - "$id": "8535", + "$id": "8557", "kind": "body", "name": "body", "serializedName": "body", @@ -113475,12 +113933,12 @@ }, "parameters": [ { - "$id": "8536", + "$id": "8558", "kind": "method", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8537", + "$id": "8559", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113496,7 
+113954,7 @@ "decorators": [] }, { - "$id": "8538", + "$id": "8560", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -113513,7 +113971,7 @@ "decorators": [] }, { - "$id": "8539", + "$id": "8561", "kind": "method", "name": "body", "serializedName": "body", @@ -113530,7 +113988,7 @@ "decorators": [] }, { - "$id": "8540", + "$id": "8562", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -113558,24 +114016,24 @@ "crossLanguageDefinitionId": "OpenAI.Containers.createContainerFile" }, { - "$id": "8541", + "$id": "8563", "kind": "paging", "name": "listContainerFiles", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8542", + "$id": "8564", "name": "listContainerFiles", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8543", + "$id": "8565", "kind": "path", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8544", + "$id": "8566", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113593,13 +114051,13 @@ "crossLanguageDefinitionId": "OpenAI.Containers.listContainerFiles.container_id" }, { - "$id": "8545", + "$id": "8567", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8546", + "$id": "8568", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -113614,7 +114072,7 @@ "readOnly": false }, { - "$id": "8547", + "$id": "8569", "kind": "query", "name": "order", "serializedName": "order", @@ -113631,13 +114089,13 @@ "readOnly": false }, { - "$id": "8548", + "$id": "8570", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8549", + "$id": "8571", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113652,7 +114110,7 @@ "readOnly": false }, { - "$id": "8550", + "$id": "8572", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -113694,12 +114152,12 @@ }, "parameters": [ { - "$id": "8551", + "$id": "8573", "kind": "method", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8552", + "$id": "8574", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113715,13 +114173,13 @@ "decorators": [] }, { - "$id": "8553", + "$id": "8575", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8554", + "$id": "8576", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -113737,7 +114195,7 @@ "decorators": [] }, { - "$id": "8555", + "$id": "8577", "kind": "method", "name": "order", "serializedName": "order", @@ -113755,13 +114213,13 @@ "decorators": [] }, { - "$id": "8556", + "$id": "8578", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8557", + "$id": "8579", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113777,7 +114235,7 @@ "decorators": [] }, { - "$id": "8558", + "$id": "8580", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -113812,7 +114270,7 @@ ], "continuationToken": { "parameter": { - "$ref": "8548" + "$ref": "8570" }, "responseSegments": [ "last_id" @@ -113822,24 +114280,24 @@ } }, { - "$id": "8559", + "$id": "8581", "kind": "basic", "name": "GetContainerFile", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8560", + "$id": "8582", "name": "GetContainerFile", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8561", + "$id": "8583", "kind": "path", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8562", + "$id": "8584", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113857,12 +114315,12 @@ "crossLanguageDefinitionId": "OpenAI.Containers.retrieveContainerFile.container_id" }, { - "$id": "8563", + "$id": "8585", "kind": "path", "name": "file_id", "serializedName": "file_id", "type": { - "$id": "8564", + "$id": "8586", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113880,7 +114338,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.retrieveContainerFile.file_id" }, { - "$id": "8565", + "$id": "8587", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -113922,12 +114380,12 @@ }, "parameters": [ { - "$id": "8566", + "$id": "8588", "kind": "method", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8567", + "$id": "8589", "kind": "string", "name": "string", 
"crossLanguageDefinitionId": "TypeSpec.string", @@ -113943,12 +114401,12 @@ "decorators": [] }, { - "$id": "8568", + "$id": "8590", "kind": "method", "name": "file_id", "serializedName": "file_id", "type": { - "$id": "8569", + "$id": "8591", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -113964,7 +114422,7 @@ "decorators": [] }, { - "$id": "8570", + "$id": "8592", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -113992,24 +114450,24 @@ "crossLanguageDefinitionId": "OpenAI.Containers.retrieveContainerFile" }, { - "$id": "8571", + "$id": "8593", "kind": "basic", "name": "deleteContainerFile", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8572", + "$id": "8594", "name": "deleteContainerFile", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8573", + "$id": "8595", "kind": "path", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8574", + "$id": "8596", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114027,12 +114485,12 @@ "crossLanguageDefinitionId": "OpenAI.Containers.deleteContainerFile.container_id" }, { - "$id": "8575", + "$id": "8597", "kind": "path", "name": "file_id", "serializedName": "file_id", "type": { - "$id": "8576", + "$id": "8598", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114050,7 +114508,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.deleteContainerFile.file_id" }, { - "$id": "8577", + "$id": "8599", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -114092,12 +114550,12 @@ }, "parameters": [ { - "$id": "8578", + "$id": "8600", "kind": "method", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8579", + "$id": "8601", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114113,12 +114571,12 @@ "decorators": [] }, { - "$id": "8580", + 
"$id": "8602", "kind": "method", "name": "file_id", "serializedName": "file_id", "type": { - "$id": "8581", + "$id": "8603", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114134,7 +114592,7 @@ "decorators": [] }, { - "$id": "8582", + "$id": "8604", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -114162,24 +114620,24 @@ "crossLanguageDefinitionId": "OpenAI.Containers.deleteContainerFile" }, { - "$id": "8583", + "$id": "8605", "kind": "basic", "name": "DownloadContainerFile", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "8584", + "$id": "8606", "name": "DownloadContainerFile", "resourceName": "Containers", "accessibility": "public", "parameters": [ { - "$id": "8585", + "$id": "8607", "kind": "path", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8586", + "$id": "8608", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114197,12 +114655,12 @@ "crossLanguageDefinitionId": "OpenAI.Containers.retrieveContainerFileContent.container_id" }, { - "$id": "8587", + "$id": "8609", "kind": "path", "name": "file_id", "serializedName": "file_id", "type": { - "$id": "8588", + "$id": "8610", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114220,7 +114678,7 @@ "crossLanguageDefinitionId": "OpenAI.Containers.retrieveContainerFileContent.file_id" }, { - "$id": "8589", + "$id": "8611", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -114242,7 +114700,7 @@ 200 ], "bodyType": { - "$id": "8590", + "$id": "8612", "kind": "bytes", "name": "bytes", "encode": "base64", @@ -114267,12 +114725,12 @@ }, "parameters": [ { - "$id": "8591", + "$id": "8613", "kind": "method", "name": "container_id", "serializedName": "container_id", "type": { - "$id": "8592", + "$id": "8614", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114288,12 +114746,12 @@ 
"decorators": [] }, { - "$id": "8593", + "$id": "8615", "kind": "method", "name": "file_id", "serializedName": "file_id", "type": { - "$id": "8594", + "$id": "8616", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114309,7 +114767,7 @@ "decorators": [] }, { - "$id": "8595", + "$id": "8617", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -114328,7 +114786,7 @@ ], "response": { "type": { - "$ref": "8590" + "$ref": "8612" } }, "isOverride": false, @@ -114339,13 +114797,13 @@ ], "parameters": [ { - "$id": "8596", + "$id": "8618", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8597", + "$id": "8619", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -114356,7 +114814,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8598", + "$id": "8620", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -114374,31 +114832,31 @@ "crossLanguageDefinitionId": "OpenAI.Containers", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8599", + "$id": "8621", "kind": "client", "name": "Embeddings", "namespace": "OpenAI", "methods": [ { - "$id": "8600", + "$id": "8622", "kind": "basic", "name": "GenerateEmbeddings", "accessibility": "public", "apiVersions": [], "summary": "Creates an embedding vector representing the input text.", "operation": { - "$id": "8601", + "$id": "8623", "name": "GenerateEmbeddings", "resourceName": "Embeddings", "summary": "Creates an embedding vector representing the input text.", "accessibility": "public", "parameters": [ { - "$id": "8602", + "$id": "8624", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -114414,7 +114872,7 @@ "crossLanguageDefinitionId": "OpenAI.Embeddings.createEmbedding.accept" }, { - "$id": "8603", + "$id": "8625", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ 
-114431,7 +114889,7 @@ "crossLanguageDefinitionId": "OpenAI.Embeddings.createEmbedding.contentType" }, { - "$id": "8604", + "$id": "8626", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -114479,7 +114937,7 @@ }, "parameters": [ { - "$id": "8605", + "$id": "8627", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -114496,7 +114954,7 @@ "decorators": [] }, { - "$id": "8606", + "$id": "8628", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -114513,7 +114971,7 @@ "decorators": [] }, { - "$id": "8607", + "$id": "8629", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -114544,13 +115002,13 @@ ], "parameters": [ { - "$id": "8608", + "$id": "8630", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8609", + "$id": "8631", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -114561,7 +115019,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8610", + "$id": "8632", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -114579,31 +115037,31 @@ "crossLanguageDefinitionId": "OpenAI.Embeddings", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8611", + "$id": "8633", "kind": "client", "name": "Files", "namespace": "OpenAI", "methods": [ { - "$id": "8612", + "$id": "8634", "kind": "basic", "name": "GetFiles", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of files that belong to the user's organization.", "operation": { - "$id": "8613", + "$id": "8635", "name": "GetFiles", "resourceName": "Files", "summary": "Returns a list of files that belong to the user's organization.", "accessibility": "public", "parameters": [ { - "$id": "8614", + "$id": "8636", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -114619,13 +115077,13 @@ "crossLanguageDefinitionId": 
"OpenAI.Files.listFiles.accept" }, { - "$id": "8615", + "$id": "8637", "kind": "query", "name": "purpose", "serializedName": "purpose", "doc": "Only return files with the given purpose.", "type": { - "$id": "8616", + "$id": "8638", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114666,7 +115124,7 @@ }, "parameters": [ { - "$id": "8617", + "$id": "8639", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -114683,13 +115141,13 @@ "decorators": [] }, { - "$id": "8618", + "$id": "8640", "kind": "method", "name": "purpose", "serializedName": "purpose", "doc": "Only return files with the given purpose.", "type": { - "$id": "8619", + "$id": "8641", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -114716,21 +115174,21 @@ "crossLanguageDefinitionId": "OpenAI.Files.listFiles" }, { - "$id": "8620", + "$id": "8642", "kind": "basic", "name": "UploadFile", "accessibility": "public", "apiVersions": [], "summary": "Upload a file that can be used across various endpoints. The size of all the files uploaded by\none organization can be up to 100 GB.\n\nThe size of individual files can be a maximum of 512 MB or 2 million tokens for Assistants. See\nthe [Assistants Tools guide](/docs/assistants/tools) to learn more about the types of files\nsupported. The Fine-tuning API only supports `.jsonl` files.\n\nPlease [contact us](https://help.openai.com/) if you need to increase these storage limits.", "operation": { - "$id": "8621", + "$id": "8643", "name": "UploadFile", "resourceName": "Files", "summary": "Upload a file that can be used across various endpoints. The size of all the files uploaded by\none organization can be up to 100 GB.\n\nThe size of individual files can be a maximum of 512 MB or 2 million tokens for Assistants. See\nthe [Assistants Tools guide](/docs/assistants/tools) to learn more about the types of files\nsupported. 
The Fine-tuning API only supports `.jsonl` files.\n\nPlease [contact us](https://help.openai.com/) if you need to increase these storage limits.", "accessibility": "public", "parameters": [ { - "$id": "8622", + "$id": "8644", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -114746,7 +115204,7 @@ "crossLanguageDefinitionId": "OpenAI.Files.createFile.accept" }, { - "$id": "8623", + "$id": "8645", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -114762,7 +115220,7 @@ "crossLanguageDefinitionId": "OpenAI.Files.createFile.contentType" }, { - "$id": "8624", + "$id": "8646", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -114810,7 +115268,7 @@ }, "parameters": [ { - "$id": "8625", + "$id": "8647", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -114827,7 +115285,7 @@ "decorators": [] }, { - "$id": "8626", + "$id": "8648", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -114844,7 +115302,7 @@ "decorators": [] }, { - "$id": "8627", + "$id": "8649", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -114872,21 +115330,21 @@ "crossLanguageDefinitionId": "OpenAI.Files.createFile" }, { - "$id": "8628", + "$id": "8650", "kind": "basic", "name": "deleteFile", "accessibility": "public", "apiVersions": [], "summary": "Delete a file", "operation": { - "$id": "8629", + "$id": "8651", "name": "deleteFile", "resourceName": "Files", "summary": "Delete a file", "accessibility": "public", "parameters": [ { - "$id": "8630", + "$id": "8652", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -114902,13 +115360,13 @@ "crossLanguageDefinitionId": "OpenAI.Files.deleteFile.accept" }, { - "$id": "8631", + "$id": "8653", "kind": "path", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to use for this request.", "type": { - "$id": "8632", + "$id": "8654", "kind": "string", "name": "string", 
"crossLanguageDefinitionId": "TypeSpec.string", @@ -114952,7 +115410,7 @@ }, "parameters": [ { - "$id": "8633", + "$id": "8655", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -114969,13 +115427,13 @@ "decorators": [] }, { - "$id": "8634", + "$id": "8656", "kind": "method", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to use for this request.", "type": { - "$id": "8635", + "$id": "8657", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115002,21 +115460,21 @@ "crossLanguageDefinitionId": "OpenAI.Files.deleteFile" }, { - "$id": "8636", + "$id": "8658", "kind": "basic", "name": "GetFile", "accessibility": "public", "apiVersions": [], "summary": "Returns information about a specific file.", "operation": { - "$id": "8637", + "$id": "8659", "name": "GetFile", "resourceName": "Files", "summary": "Returns information about a specific file.", "accessibility": "public", "parameters": [ { - "$id": "8638", + "$id": "8660", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -115032,13 +115490,13 @@ "crossLanguageDefinitionId": "OpenAI.Files.retrieveFile.accept" }, { - "$id": "8639", + "$id": "8661", "kind": "path", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to use for this request.", "type": { - "$id": "8640", + "$id": "8662", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115082,7 +115540,7 @@ }, "parameters": [ { - "$id": "8641", + "$id": "8663", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -115099,13 +115557,13 @@ "decorators": [] }, { - "$id": "8642", + "$id": "8664", "kind": "method", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to use for this request.", "type": { - "$id": "8643", + "$id": "8665", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115132,21 +115590,21 @@ "crossLanguageDefinitionId": 
"OpenAI.Files.retrieveFile" }, { - "$id": "8644", + "$id": "8666", "kind": "basic", "name": "downloadFile", "accessibility": "public", "apiVersions": [], "summary": "Returns the contents of the specified file.", "operation": { - "$id": "8645", + "$id": "8667", "name": "downloadFile", "resourceName": "Files", "summary": "Returns the contents of the specified file.", "accessibility": "public", "parameters": [ { - "$id": "8646", + "$id": "8668", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -115162,13 +115620,13 @@ "crossLanguageDefinitionId": "OpenAI.Files.downloadFile.accept" }, { - "$id": "8647", + "$id": "8669", "kind": "path", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to use for this request.", "type": { - "$id": "8648", + "$id": "8670", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115192,7 +115650,7 @@ 200 ], "bodyType": { - "$id": "8649", + "$id": "8671", "kind": "bytes", "name": "bytes", "encode": "base64", @@ -115217,7 +115675,7 @@ }, "parameters": [ { - "$id": "8650", + "$id": "8672", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -115234,13 +115692,13 @@ "decorators": [] }, { - "$id": "8651", + "$id": "8673", "kind": "method", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to use for this request.", "type": { - "$id": "8652", + "$id": "8674", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115258,7 +115716,7 @@ ], "response": { "type": { - "$ref": "8649" + "$ref": "8671" } }, "isOverride": false, @@ -115269,13 +115727,13 @@ ], "parameters": [ { - "$id": "8653", + "$id": "8675", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8654", + "$id": "8676", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -115286,7 +115744,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": 
"8655", + "$id": "8677", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -115304,37 +115762,37 @@ "crossLanguageDefinitionId": "OpenAI.Files", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8656", + "$id": "8678", "kind": "client", "name": "FineTuning", "namespace": "OpenAI", "methods": [ { - "$id": "8657", + "$id": "8679", "kind": "basic", "name": "listFineTuningCheckpointPermissions", "accessibility": "public", "apiVersions": [], "summary": "NOTE: This endpoint requires an admin API key.\nOrganization owners can use this endpoint to view all permissions for a fine-tuned model checkpoint.", "operation": { - "$id": "8658", + "$id": "8680", "name": "listFineTuningCheckpointPermissions", "resourceName": "FineTuning", "summary": "NOTE: This endpoint requires an admin API key.\nOrganization owners can use this endpoint to view all permissions for a fine-tuned model checkpoint.", "accessibility": "public", "parameters": [ { - "$id": "8659", + "$id": "8681", "kind": "path", "name": "fine_tuned_model_checkpoint", "serializedName": "fine_tuned_model_checkpoint", "doc": "The ID of the fine-tuned model checkpoint to get permissions for.", "type": { - "$id": "8660", + "$id": "8682", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115352,13 +115810,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listFineTuningCheckpointPermissions.fine_tuned_model_checkpoint" }, { - "$id": "8661", + "$id": "8683", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8662", + "$id": "8684", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115373,13 +115831,13 @@ "readOnly": false }, { - "$id": "8663", + "$id": "8685", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8664", + "$id": "8686", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -115394,7 +115852,7 @@ "readOnly": false }, { - "$id": "8665", + "$id": "8687", "kind": "query", "name": "order", "serializedName": "order", @@ -115411,13 +115869,13 @@ "readOnly": false }, { - "$id": "8666", + "$id": "8688", "kind": "query", "name": "project_id", "serializedName": "project_id", "doc": "The ID of the project to get permissions for.", "type": { - "$id": "8667", + "$id": "8689", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115432,7 +115890,7 @@ "readOnly": false }, { - "$id": "8668", + "$id": "8690", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -115474,13 +115932,13 @@ }, "parameters": [ { - "$id": "8669", + "$id": "8691", "kind": "method", "name": "fine_tuned_model_checkpoint", "serializedName": "fine_tuned_model_checkpoint", "doc": "The ID of the fine-tuned model checkpoint to get permissions for.", "type": { - "$id": "8670", + "$id": "8692", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115496,13 +115954,13 @@ "decorators": [] }, { - "$id": "8671", + "$id": "8693", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "8672", + "$id": "8694", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115518,13 +115976,13 @@ "decorators": [] }, { - "$id": "8673", + "$id": "8695", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "8674", + "$id": "8696", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -115540,7 +115998,7 @@ "decorators": [] }, { - "$id": "8675", + "$id": "8697", "kind": "method", "name": "order", "serializedName": "order", @@ -115558,13 +116016,13 @@ "decorators": [] }, { - "$id": "8676", + "$id": "8698", "kind": "method", "name": "project_id", "serializedName": "project_id", "doc": "The ID of the project to get permissions for.", "type": { - "$id": "8677", + "$id": "8699", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115580,7 +116038,7 @@ "decorators": [] }, { - "$id": "8678", + "$id": "8700", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -115608,27 +116066,27 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listFineTuningCheckpointPermissions" }, { - "$id": "8679", + "$id": "8701", "kind": "basic", "name": "createFineTuningCheckpointPermission", "accessibility": "public", "apiVersions": [], "summary": "NOTE: This endpoint requires an admin API key.\nThis enables organization owners to share fine-tuned models with other projects in their organization.", "operation": { - "$id": "8680", + "$id": "8702", "name": "createFineTuningCheckpointPermission", "resourceName": "FineTuning", "summary": "NOTE: This endpoint requires an admin API 
key.\nThis enables organization owners to share fine-tuned models with other projects in their organization.", "accessibility": "public", "parameters": [ { - "$id": "8681", + "$id": "8703", "kind": "path", "name": "fine_tuned_model_checkpoint", "serializedName": "fine_tuned_model_checkpoint", "doc": "The ID of the fine-tuned model checkpoint to create a permission for.", "type": { - "$id": "8682", + "$id": "8704", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115646,7 +116104,7 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.createFineTuningCheckpointPermission.fine_tuned_model_checkpoint" }, { - "$id": "8683", + "$id": "8705", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -115663,7 +116121,7 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.createFineTuningCheckpointPermission.contentType" }, { - "$id": "8684", + "$id": "8706", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -115679,7 +116137,7 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.createFineTuningCheckpointPermission.accept" }, { - "$id": "8685", + "$id": "8707", "kind": "body", "name": "createFineTuningCheckpointPermissionRequest", "serializedName": "createFineTuningCheckpointPermissionRequest", @@ -115727,13 +116185,13 @@ }, "parameters": [ { - "$id": "8686", + "$id": "8708", "kind": "method", "name": "fine_tuned_model_checkpoint", "serializedName": "fine_tuned_model_checkpoint", "doc": "The ID of the fine-tuned model checkpoint to create a permission for.", "type": { - "$id": "8687", + "$id": "8709", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115749,7 +116207,7 @@ "decorators": [] }, { - "$id": "8688", + "$id": "8710", "kind": "method", "name": "project_ids", "serializedName": "project_ids", @@ -115767,7 +116225,7 @@ "decorators": [] }, { - "$id": "8689", + "$id": "8711", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -115785,7 
+116243,7 @@ "decorators": [] }, { - "$id": "8690", + "$id": "8712", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -115813,27 +116271,27 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.createFineTuningCheckpointPermission" }, { - "$id": "8691", + "$id": "8713", "kind": "basic", "name": "deleteFineTuningCheckpointPermission", "accessibility": "public", "apiVersions": [], "summary": "NOTE: This endpoint requires an admin API key.\nOrganization owners can use this endpoint to delete a permission for a fine-tuned model checkpoint.", "operation": { - "$id": "8692", + "$id": "8714", "name": "deleteFineTuningCheckpointPermission", "resourceName": "FineTuning", "summary": "NOTE: This endpoint requires an admin API key.\nOrganization owners can use this endpoint to delete a permission for a fine-tuned model checkpoint.", "accessibility": "public", "parameters": [ { - "$id": "8693", + "$id": "8715", "kind": "path", "name": "fine_tuned_model_checkpoint", "serializedName": "fine_tuned_model_checkpoint", "doc": "The ID of the fine-tuned model checkpoint to delete a permission for.", "type": { - "$id": "8694", + "$id": "8716", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115851,13 +116309,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.deleteFineTuningCheckpointPermission.fine_tuned_model_checkpoint" }, { - "$id": "8695", + "$id": "8717", "kind": "path", "name": "permission_id", "serializedName": "permission_id", "doc": "The ID of the permission to delete.", "type": { - "$id": "8696", + "$id": "8718", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115875,7 +116333,7 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.deleteFineTuningCheckpointPermission.permission_id" }, { - "$id": "8697", + "$id": "8719", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -115917,13 +116375,13 @@ }, "parameters": [ { - "$id": "8698", + "$id": "8720", "kind": "method", 
"name": "fine_tuned_model_checkpoint", "serializedName": "fine_tuned_model_checkpoint", "doc": "The ID of the fine-tuned model checkpoint to delete a permission for.", "type": { - "$id": "8699", + "$id": "8721", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115939,13 +116397,13 @@ "decorators": [] }, { - "$id": "8700", + "$id": "8722", "kind": "method", "name": "permission_id", "serializedName": "permission_id", "doc": "The ID of the permission to delete.", "type": { - "$id": "8701", + "$id": "8723", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -115961,7 +116419,7 @@ "decorators": [] }, { - "$id": "8702", + "$id": "8724", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -115989,21 +116447,21 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.deleteFineTuningCheckpointPermission" }, { - "$id": "8703", + "$id": "8725", "kind": "basic", "name": "createFineTuningJob", "accessibility": "public", "apiVersions": [], "summary": "Creates a fine-tuning job which begins the process of creating a new model from a given dataset.\n\nResponse includes details of the enqueued job including job status and the name of the fine-tuned models once complete.\n\n[Learn more about fine-tuning](/docs/guides/fine-tuning)", "operation": { - "$id": "8704", + "$id": "8726", "name": "createFineTuningJob", "resourceName": "FineTuning", "summary": "Creates a fine-tuning job which begins the process of creating a new model from a given dataset.\n\nResponse includes details of the enqueued job including job status and the name of the fine-tuned models once complete.\n\n[Learn more about fine-tuning](/docs/guides/fine-tuning)", "accessibility": "public", "parameters": [ { - "$id": "8705", + "$id": "8727", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -116019,7 +116477,7 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.createFineTuningJob.accept" }, { - "$id": "8706", + "$id": 
"8728", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -116036,7 +116494,7 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.createFineTuningJob.contentType" }, { - "$id": "8707", + "$id": "8729", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -116084,7 +116542,7 @@ }, "parameters": [ { - "$id": "8708", + "$id": "8730", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -116101,7 +116559,7 @@ "decorators": [] }, { - "$id": "8709", + "$id": "8731", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -116118,7 +116576,7 @@ "decorators": [] }, { - "$id": "8710", + "$id": "8732", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -116147,21 +116605,21 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.createFineTuningJob" }, { - "$id": "8711", + "$id": "8733", "kind": "basic", "name": "listPaginatedFineTuningJobs", "accessibility": "public", "apiVersions": [], "summary": "List your organization's fine-tuning jobs", "operation": { - "$id": "8712", + "$id": "8734", "name": "listPaginatedFineTuningJobs", "resourceName": "FineTuning", "summary": "List your organization's fine-tuning jobs", "accessibility": "public", "parameters": [ { - "$id": "8713", + "$id": "8735", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -116177,13 +116635,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listPaginatedFineTuningJobs.accept" }, { - "$id": "8714", + "$id": "8736", "kind": "query", "name": "after", "serializedName": "after", "doc": "Identifier for the last job from the previous pagination request.", "type": { - "$id": "8715", + "$id": "8737", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116198,13 +116656,13 @@ "readOnly": false }, { - "$id": "8716", + "$id": "8738", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "Number of fine-tuning jobs to retrieve.", "type": { - 
"$id": "8717", + "$id": "8739", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -116245,7 +116703,7 @@ }, "parameters": [ { - "$id": "8718", + "$id": "8740", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -116262,13 +116720,13 @@ "decorators": [] }, { - "$id": "8719", + "$id": "8741", "kind": "method", "name": "after", "serializedName": "after", "doc": "Identifier for the last job from the previous pagination request.", "type": { - "$id": "8720", + "$id": "8742", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116284,13 +116742,13 @@ "decorators": [] }, { - "$id": "8721", + "$id": "8743", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "Number of fine-tuning jobs to retrieve.", "type": { - "$id": "8722", + "$id": "8744", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -116317,21 +116775,21 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listPaginatedFineTuningJobs" }, { - "$id": "8723", + "$id": "8745", "kind": "basic", "name": "retrieveFineTuningJob", "accessibility": "public", "apiVersions": [], "summary": "Get info about a fine-tuning job.\n\n[Learn more about fine-tuning](/docs/guides/fine-tuning)", "operation": { - "$id": "8724", + "$id": "8746", "name": "retrieveFineTuningJob", "resourceName": "FineTuning", "summary": "Get info about a fine-tuning job.\n\n[Learn more about fine-tuning](/docs/guides/fine-tuning)", "accessibility": "public", "parameters": [ { - "$id": "8725", + "$id": "8747", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -116347,13 +116805,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.retrieveFineTuningJob.accept" }, { - "$id": "8726", + "$id": "8748", "kind": "path", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job.", "type": { - "$id": "8727", + "$id": "8749", "kind": "string", "name": "string", 
"crossLanguageDefinitionId": "TypeSpec.string", @@ -116397,7 +116855,7 @@ }, "parameters": [ { - "$id": "8728", + "$id": "8750", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -116414,13 +116872,13 @@ "decorators": [] }, { - "$id": "8729", + "$id": "8751", "kind": "method", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job.", "type": { - "$id": "8730", + "$id": "8752", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116447,21 +116905,21 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.retrieveFineTuningJob" }, { - "$id": "8731", + "$id": "8753", "kind": "basic", "name": "cancelFineTuningJob", "accessibility": "public", "apiVersions": [], "summary": "Immediately cancel a fine-tune job.", "operation": { - "$id": "8732", + "$id": "8754", "name": "cancelFineTuningJob", "resourceName": "FineTuning", "summary": "Immediately cancel a fine-tune job.", "accessibility": "public", "parameters": [ { - "$id": "8733", + "$id": "8755", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -116477,13 +116935,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.cancelFineTuningJob.accept" }, { - "$id": "8734", + "$id": "8756", "kind": "path", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to cancel.", "type": { - "$id": "8735", + "$id": "8757", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116527,7 +116985,7 @@ }, "parameters": [ { - "$id": "8736", + "$id": "8758", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -116544,13 +117002,13 @@ "decorators": [] }, { - "$id": "8737", + "$id": "8759", "kind": "method", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to cancel.", "type": { - "$id": "8738", + "$id": "8760", "kind": "string", "name": "string", 
"crossLanguageDefinitionId": "TypeSpec.string", @@ -116577,21 +117035,21 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.cancelFineTuningJob" }, { - "$id": "8739", + "$id": "8761", "kind": "basic", "name": "listFineTuningJobCheckpoints", "accessibility": "public", "apiVersions": [], "summary": "List the checkpoints for a fine-tuning job.", "operation": { - "$id": "8740", + "$id": "8762", "name": "listFineTuningJobCheckpoints", "resourceName": "FineTuning", "summary": "List the checkpoints for a fine-tuning job.", "accessibility": "public", "parameters": [ { - "$id": "8741", + "$id": "8763", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -116607,13 +117065,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listFineTuningJobCheckpoints.accept" }, { - "$id": "8742", + "$id": "8764", "kind": "path", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to get checkpoints for.", "type": { - "$id": "8743", + "$id": "8765", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116631,13 +117089,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listFineTuningJobCheckpoints.fine_tuning_job_id" }, { - "$id": "8744", + "$id": "8766", "kind": "query", "name": "after", "serializedName": "after", "doc": "Identifier for the last checkpoint ID from the previous pagination request.", "type": { - "$id": "8745", + "$id": "8767", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116652,13 +117110,13 @@ "readOnly": false }, { - "$id": "8746", + "$id": "8768", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "Number of checkpoints to retrieve.", "type": { - "$id": "8747", + "$id": "8769", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -116699,7 +117157,7 @@ }, "parameters": [ { - "$id": "8748", + "$id": "8770", "kind": "method", "name": "accept", "serializedName": "Accept", @@ 
-116716,13 +117174,13 @@ "decorators": [] }, { - "$id": "8749", + "$id": "8771", "kind": "method", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to get checkpoints for.", "type": { - "$id": "8750", + "$id": "8772", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116738,13 +117196,13 @@ "decorators": [] }, { - "$id": "8751", + "$id": "8773", "kind": "method", "name": "after", "serializedName": "after", "doc": "Identifier for the last checkpoint ID from the previous pagination request.", "type": { - "$id": "8752", + "$id": "8774", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116760,13 +117218,13 @@ "decorators": [] }, { - "$id": "8753", + "$id": "8775", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "Number of checkpoints to retrieve.", "type": { - "$id": "8754", + "$id": "8776", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -116793,21 +117251,21 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listFineTuningJobCheckpoints" }, { - "$id": "8755", + "$id": "8777", "kind": "basic", "name": "listFineTuningEvents", "accessibility": "public", "apiVersions": [], "summary": "Get status updates for a fine-tuning job.", "operation": { - "$id": "8756", + "$id": "8778", "name": "listFineTuningEvents", "resourceName": "FineTuning", "summary": "Get status updates for a fine-tuning job.", "accessibility": "public", "parameters": [ { - "$id": "8757", + "$id": "8779", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -116823,13 +117281,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listFineTuningEvents.accept" }, { - "$id": "8758", + "$id": "8780", "kind": "path", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to get events for.", "type": { - "$id": "8759", + "$id": "8781", "kind": "string", 
"name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116847,13 +117305,13 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.listFineTuningEvents.fine_tuning_job_id" }, { - "$id": "8760", + "$id": "8782", "kind": "query", "name": "after", "serializedName": "after", "doc": "Identifier for the last event from the previous pagination request.", "type": { - "$id": "8761", + "$id": "8783", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116868,13 +117326,13 @@ "readOnly": false }, { - "$id": "8762", + "$id": "8784", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "Number of events to retrieve.", "type": { - "$id": "8763", + "$id": "8785", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -116915,7 +117373,7 @@ }, "parameters": [ { - "$id": "8764", + "$id": "8786", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -116932,13 +117390,13 @@ "decorators": [] }, { - "$id": "8765", + "$id": "8787", "kind": "method", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to get events for.", "type": { - "$id": "8766", + "$id": "8788", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116954,13 +117412,13 @@ "decorators": [] }, { - "$id": "8767", + "$id": "8789", "kind": "method", "name": "after", "serializedName": "after", "doc": "Identifier for the last event from the previous pagination request.", "type": { - "$id": "8768", + "$id": "8790", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -116976,13 +117434,13 @@ "decorators": [] }, { - "$id": "8769", + "$id": "8791", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "Number of events to retrieve.", "type": { - "$id": "8770", + "$id": "8792", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -117009,27 +117467,27 @@ 
"crossLanguageDefinitionId": "OpenAI.FineTuning.listFineTuningEvents" }, { - "$id": "8771", + "$id": "8793", "kind": "basic", "name": "pauseFineTuningJob", "accessibility": "public", "apiVersions": [], "summary": "Pause a fine-tune job.", "operation": { - "$id": "8772", + "$id": "8794", "name": "pauseFineTuningJob", "resourceName": "FineTuning", "summary": "Pause a fine-tune job.", "accessibility": "public", "parameters": [ { - "$id": "8773", + "$id": "8795", "kind": "path", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to pause.", "type": { - "$id": "8774", + "$id": "8796", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -117047,7 +117505,7 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.pauseFineTuningJob.fine_tuning_job_id" }, { - "$id": "8775", + "$id": "8797", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -117089,13 +117547,13 @@ }, "parameters": [ { - "$id": "8776", + "$id": "8798", "kind": "method", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to pause.", "type": { - "$id": "8777", + "$id": "8799", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -117111,7 +117569,7 @@ "decorators": [] }, { - "$id": "8778", + "$id": "8800", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -117139,27 +117597,27 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.pauseFineTuningJob" }, { - "$id": "8779", + "$id": "8801", "kind": "basic", "name": "resumeFineTuningJob", "accessibility": "public", "apiVersions": [], "summary": "Resume a paused fine-tune job.", "operation": { - "$id": "8780", + "$id": "8802", "name": "resumeFineTuningJob", "resourceName": "FineTuning", "summary": "Resume a paused fine-tune job.", "accessibility": "public", "parameters": [ { - "$id": "8781", + "$id": "8803", "kind": "path", "name": "fine_tuning_job_id", 
"serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to resume.", "type": { - "$id": "8782", + "$id": "8804", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -117177,7 +117635,7 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning.resumeFineTuningJob.fine_tuning_job_id" }, { - "$id": "8783", + "$id": "8805", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -117219,13 +117677,13 @@ }, "parameters": [ { - "$id": "8784", + "$id": "8806", "kind": "method", "name": "fine_tuning_job_id", "serializedName": "fine_tuning_job_id", "doc": "The ID of the fine-tuning job to resume.", "type": { - "$id": "8785", + "$id": "8807", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -117241,7 +117699,7 @@ "decorators": [] }, { - "$id": "8786", + "$id": "8808", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -117271,13 +117729,13 @@ ], "parameters": [ { - "$id": "8787", + "$id": "8809", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8788", + "$id": "8810", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -117288,7 +117746,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8789", + "$id": "8811", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -117306,31 +117764,31 @@ "crossLanguageDefinitionId": "OpenAI.FineTuning", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8790", + "$id": "8812", "kind": "client", "name": "Graders", "namespace": "OpenAI", "methods": [ { - "$id": "8791", + "$id": "8813", "kind": "basic", "name": "runGrader", "accessibility": "public", "apiVersions": [], "summary": "Run a grader.", "operation": { - "$id": "8792", + "$id": "8814", "name": "runGrader", "resourceName": "Graders", "summary": "Run a grader.", "accessibility": "public", "parameters": [ { - 
"$id": "8793", + "$id": "8815", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -117347,7 +117805,7 @@ "crossLanguageDefinitionId": "OpenAI.Graders.runGrader.contentType" }, { - "$id": "8794", + "$id": "8816", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -117363,7 +117821,7 @@ "crossLanguageDefinitionId": "OpenAI.Graders.runGrader.accept" }, { - "$id": "8795", + "$id": "8817", "kind": "body", "name": "request", "serializedName": "request", @@ -117411,7 +117869,7 @@ }, "parameters": [ { - "$id": "8796", + "$id": "8818", "kind": "method", "name": "request", "serializedName": "request", @@ -117428,7 +117886,7 @@ "decorators": [] }, { - "$id": "8797", + "$id": "8819", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -117446,7 +117904,7 @@ "decorators": [] }, { - "$id": "8798", + "$id": "8820", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -117474,21 +117932,21 @@ "crossLanguageDefinitionId": "OpenAI.Graders.runGrader" }, { - "$id": "8799", + "$id": "8821", "kind": "basic", "name": "validateGrader", "accessibility": "public", "apiVersions": [], "summary": "Validate a grader.", "operation": { - "$id": "8800", + "$id": "8822", "name": "validateGrader", "resourceName": "Graders", "summary": "Validate a grader.", "accessibility": "public", "parameters": [ { - "$id": "8801", + "$id": "8823", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -117505,7 +117963,7 @@ "crossLanguageDefinitionId": "OpenAI.Graders.validateGrader.contentType" }, { - "$id": "8802", + "$id": "8824", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -117521,7 +117979,7 @@ "crossLanguageDefinitionId": "OpenAI.Graders.validateGrader.accept" }, { - "$id": "8803", + "$id": "8825", "kind": "body", "name": "request", "serializedName": "request", @@ -117569,7 +118027,7 @@ }, "parameters": [ { - "$id": "8804", + "$id": "8826", "kind": "method", "name": "request", 
"serializedName": "request", @@ -117586,7 +118044,7 @@ "decorators": [] }, { - "$id": "8805", + "$id": "8827", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -117604,7 +118062,7 @@ "decorators": [] }, { - "$id": "8806", + "$id": "8828", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -117634,13 +118092,13 @@ ], "parameters": [ { - "$id": "8807", + "$id": "8829", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8808", + "$id": "8830", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -117651,7 +118109,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "8809", + "$id": "8831", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -117669,37 +118127,37 @@ "crossLanguageDefinitionId": "OpenAI.Graders", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8810", + "$id": "8832", "kind": "client", "name": "Evals", "namespace": "OpenAI", "methods": [ { - "$id": "8811", + "$id": "8833", "kind": "basic", "name": "listEvals", "accessibility": "public", "apiVersions": [], "summary": "List evaluations for a project.", "operation": { - "$id": "8812", + "$id": "8834", "name": "listEvals", "resourceName": "Evals", "summary": "List evaluations for a project.", "accessibility": "public", "parameters": [ { - "$id": "8813", + "$id": "8835", "kind": "query", "name": "after", "serializedName": "after", "doc": "Identifier for the last eval from the previous pagination request.", "type": { - "$id": "8814", + "$id": "8836", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -117714,13 +118172,13 @@ "readOnly": false }, { - "$id": "8815", + "$id": "8837", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of evals to be returned in a single pagination response.", "type": { - "$id": "8816", + "$id": 
"8838", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -117735,7 +118193,7 @@ "readOnly": false }, { - "$id": "8817", + "$id": "8839", "kind": "query", "name": "order", "serializedName": "order", @@ -117752,7 +118210,7 @@ "readOnly": false }, { - "$id": "8818", + "$id": "8840", "kind": "query", "name": "order_by", "serializedName": "order_by", @@ -117769,7 +118227,7 @@ "readOnly": false }, { - "$id": "8819", + "$id": "8841", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -117811,13 +118269,13 @@ }, "parameters": [ { - "$id": "8820", + "$id": "8842", "kind": "method", "name": "after", "serializedName": "after", "doc": "Identifier for the last eval from the previous pagination request.", "type": { - "$id": "8821", + "$id": "8843", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -117833,13 +118291,13 @@ "decorators": [] }, { - "$id": "8822", + "$id": "8844", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of evals to be returned in a single pagination response.", "type": { - "$id": "8823", + "$id": "8845", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -117855,7 +118313,7 @@ "decorators": [] }, { - "$id": "8824", + "$id": "8846", "kind": "method", "name": "order", "serializedName": "order", @@ -117873,7 +118331,7 @@ "decorators": [] }, { - "$id": "8825", + "$id": "8847", "kind": "method", "name": "order_by", "serializedName": "order_by", @@ -117891,7 +118349,7 @@ "decorators": [] }, { - "$id": "8826", + "$id": "8848", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -117919,21 +118377,21 @@ "crossLanguageDefinitionId": "OpenAI.Evals.listEvals" }, { - "$id": "8827", + "$id": "8849", "kind": "basic", "name": "createEval", "accessibility": "public", "apiVersions": [], "doc": "Create the structure of an evaluation that can be used to test a model's\nperformance.\n\nAn evaluation 
is a set of testing criteria and a datasource. After\ncreating an evaluation, you can run it on different models and model\nparameters. We support several types of graders and datasources.\n\nFor more information, see the [Evals guide](/docs/guides/evals).", "operation": { - "$id": "8828", + "$id": "8850", "name": "createEval", "resourceName": "Evals", "doc": "Create the structure of an evaluation that can be used to test a model's\nperformance.\n\nAn evaluation is a set of testing criteria and a datasource. After\ncreating an evaluation, you can run it on different models and model\nparameters. We support several types of graders and datasources.\n\nFor more information, see the [Evals guide](/docs/guides/evals).", "accessibility": "public", "parameters": [ { - "$id": "8829", + "$id": "8851", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -117950,7 +118408,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.createEval.contentType" }, { - "$id": "8830", + "$id": "8852", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -117966,7 +118424,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.createEval.accept" }, { - "$id": "8831", + "$id": "8853", "kind": "body", "name": "body", "serializedName": "body", @@ -118014,7 +118472,7 @@ }, "parameters": [ { - "$id": "8832", + "$id": "8854", "kind": "method", "name": "body", "serializedName": "body", @@ -118031,7 +118489,7 @@ "decorators": [] }, { - "$id": "8833", + "$id": "8855", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -118049,7 +118507,7 @@ "decorators": [] }, { - "$id": "8834", + "$id": "8856", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -118077,26 +118535,26 @@ "crossLanguageDefinitionId": "OpenAI.Evals.createEval" }, { - "$id": "8835", + "$id": "8857", "kind": "basic", "name": "getEval", "accessibility": "public", "apiVersions": [], "summary": "Retrieve an evaluation by its ID.", "operation": { - "$id": "8836", + "$id": 
"8858", "name": "getEval", "resourceName": "Evals", "summary": "Retrieve an evaluation by its ID.", "accessibility": "public", "parameters": [ { - "$id": "8837", + "$id": "8859", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "type": { - "$id": "8838", + "$id": "8860", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118114,7 +118572,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEval.eval_id" }, { - "$id": "8839", + "$id": "8861", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -118156,12 +118614,12 @@ }, "parameters": [ { - "$id": "8840", + "$id": "8862", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "type": { - "$id": "8841", + "$id": "8863", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118177,7 +118635,7 @@ "decorators": [] }, { - "$id": "8842", + "$id": "8864", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -118205,27 +118663,27 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEval" }, { - "$id": "8843", + "$id": "8865", "kind": "basic", "name": "updateEval", "accessibility": "public", "apiVersions": [], "doc": "Update select, mutable properties of a specified evaluation.", "operation": { - "$id": "8844", + "$id": "8866", "name": "updateEval", "resourceName": "Evals", "doc": "Update select, mutable properties of a specified evaluation.", "accessibility": "public", "parameters": [ { - "$id": "8845", + "$id": "8867", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation to update.", "type": { - "$id": "8846", + "$id": "8868", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118243,7 +118701,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.updateEval.eval_id" }, { - "$id": "8847", + "$id": "8869", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -118260,7 +118718,7 @@ "crossLanguageDefinitionId": 
"OpenAI.Evals.updateEval.contentType" }, { - "$id": "8848", + "$id": "8870", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -118276,7 +118734,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.updateEval.accept" }, { - "$id": "8849", + "$id": "8871", "kind": "body", "name": "body", "serializedName": "body", @@ -118324,13 +118782,13 @@ }, "parameters": [ { - "$id": "8850", + "$id": "8872", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation to update.", "type": { - "$id": "8851", + "$id": "8873", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118346,7 +118804,7 @@ "decorators": [] }, { - "$id": "8852", + "$id": "8874", "kind": "method", "name": "body", "serializedName": "body", @@ -118363,7 +118821,7 @@ "decorators": [] }, { - "$id": "8853", + "$id": "8875", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -118381,7 +118839,7 @@ "decorators": [] }, { - "$id": "8854", + "$id": "8876", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -118409,27 +118867,27 @@ "crossLanguageDefinitionId": "OpenAI.Evals.updateEval" }, { - "$id": "8855", + "$id": "8877", "kind": "basic", "name": "deleteEval", "accessibility": "public", "apiVersions": [], "doc": "Delete a specified evaluation.", "operation": { - "$id": "8856", + "$id": "8878", "name": "deleteEval", "resourceName": "Evals", "doc": "Delete a specified evaluation.", "accessibility": "public", "parameters": [ { - "$id": "8857", + "$id": "8879", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation to delete.", "type": { - "$id": "8858", + "$id": "8880", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118447,7 +118905,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.deleteEval.eval_id" }, { - "$id": "8859", + "$id": "8881", "kind": "header", "name": "accept", "serializedName": "Accept", @@ 
-118489,13 +118947,13 @@ }, "parameters": [ { - "$id": "8860", + "$id": "8882", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation to delete.", "type": { - "$id": "8861", + "$id": "8883", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118511,7 +118969,7 @@ "decorators": [] }, { - "$id": "8862", + "$id": "8884", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -118539,7 +118997,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.deleteEval" }, { - "$id": "8863", + "$id": "8885", "kind": "basic", "name": "getEvalRuns", "accessibility": "public", @@ -118547,7 +119005,7 @@ "doc": "Retrieve a list of runs for a specified evaluation.", "summary": "", "operation": { - "$id": "8864", + "$id": "8886", "name": "getEvalRuns", "resourceName": "Evals", "summary": "", @@ -118555,13 +119013,13 @@ "accessibility": "public", "parameters": [ { - "$id": "8865", + "$id": "8887", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation to retrieve runs for.", "type": { - "$id": "8866", + "$id": "8888", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118579,13 +119037,13 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRuns.eval_id" }, { - "$id": "8867", + "$id": "8889", "kind": "query", "name": "after", "serializedName": "after", "doc": "Identifier for the last run from the previous pagination request.", "type": { - "$id": "8868", + "$id": "8890", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118600,13 +119058,13 @@ "readOnly": false }, { - "$id": "8869", + "$id": "8891", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of runs to be returned in a single pagination response.", "type": { - "$id": "8870", + "$id": "8892", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -118621,7 
+119079,7 @@ "readOnly": false }, { - "$id": "8871", + "$id": "8893", "kind": "query", "name": "order", "serializedName": "order", @@ -118638,7 +119096,7 @@ "readOnly": false }, { - "$id": "8872", + "$id": "8894", "kind": "query", "name": "status", "serializedName": "status", @@ -118655,7 +119113,7 @@ "readOnly": false }, { - "$id": "8873", + "$id": "8895", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -118697,13 +119155,13 @@ }, "parameters": [ { - "$id": "8874", + "$id": "8896", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation to retrieve runs for.", "type": { - "$id": "8875", + "$id": "8897", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118719,13 +119177,13 @@ "decorators": [] }, { - "$id": "8876", + "$id": "8898", "kind": "method", "name": "after", "serializedName": "after", "doc": "Identifier for the last run from the previous pagination request.", "type": { - "$id": "8877", + "$id": "8899", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118741,13 +119199,13 @@ "decorators": [] }, { - "$id": "8878", + "$id": "8900", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of runs to be returned in a single pagination response.", "type": { - "$id": "8879", + "$id": "8901", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -118763,7 +119221,7 @@ "decorators": [] }, { - "$id": "8880", + "$id": "8902", "kind": "method", "name": "order", "serializedName": "order", @@ -118781,7 +119239,7 @@ "decorators": [] }, { - "$id": "8881", + "$id": "8903", "kind": "method", "name": "status", "serializedName": "status", @@ -118799,7 +119257,7 @@ "decorators": [] }, { - "$id": "8882", + "$id": "8904", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -118827,27 +119285,27 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRuns" }, { - 
"$id": "8883", + "$id": "8905", "kind": "basic", "name": "createEvalRun", "accessibility": "public", "apiVersions": [], "doc": "Create a new evaluation run, beginning the grading process.", "operation": { - "$id": "8884", + "$id": "8906", "name": "createEvalRun", "resourceName": "Evals", "doc": "Create a new evaluation run, beginning the grading process.", "accessibility": "public", "parameters": [ { - "$id": "8885", + "$id": "8907", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation to run.", "type": { - "$id": "8886", + "$id": "8908", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118865,7 +119323,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.createEvalRun.eval_id" }, { - "$id": "8887", + "$id": "8909", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -118882,7 +119340,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.createEvalRun.contentType" }, { - "$id": "8888", + "$id": "8910", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -118898,7 +119356,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.createEvalRun.accept" }, { - "$id": "8889", + "$id": "8911", "kind": "body", "name": "body", "serializedName": "body", @@ -118946,13 +119404,13 @@ }, "parameters": [ { - "$id": "8890", + "$id": "8912", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation to run.", "type": { - "$id": "8891", + "$id": "8913", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -118968,7 +119426,7 @@ "decorators": [] }, { - "$id": "8892", + "$id": "8914", "kind": "method", "name": "body", "serializedName": "body", @@ -118985,7 +119443,7 @@ "decorators": [] }, { - "$id": "8893", + "$id": "8915", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -119003,7 +119461,7 @@ "decorators": [] }, { - "$id": "8894", + "$id": "8916", "kind": "method", "name": 
"accept", "serializedName": "Accept", @@ -119031,27 +119489,27 @@ "crossLanguageDefinitionId": "OpenAI.Evals.createEvalRun" }, { - "$id": "8895", + "$id": "8917", "kind": "basic", "name": "getEvalRun", "accessibility": "public", "apiVersions": [], "doc": "Retrieve a specific evaluation run by its ID.", "operation": { - "$id": "8896", + "$id": "8918", "name": "getEvalRun", "resourceName": "Evals", "doc": "Retrieve a specific evaluation run by its ID.", "accessibility": "public", "parameters": [ { - "$id": "8897", + "$id": "8919", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8898", + "$id": "8920", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119069,13 +119527,13 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRun.eval_id" }, { - "$id": "8899", + "$id": "8921", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run to retrieve.", "type": { - "$id": "8900", + "$id": "8922", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119093,7 +119551,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRun.run_id" }, { - "$id": "8901", + "$id": "8923", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -119135,13 +119593,13 @@ }, "parameters": [ { - "$id": "8902", + "$id": "8924", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8903", + "$id": "8925", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119157,13 +119615,13 @@ "decorators": [] }, { - "$id": "8904", + "$id": "8926", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run to retrieve.", "type": { - "$id": "8905", + "$id": "8927", "kind": "string", "name": "string", "crossLanguageDefinitionId": 
"TypeSpec.string", @@ -119179,7 +119637,7 @@ "decorators": [] }, { - "$id": "8906", + "$id": "8928", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -119207,27 +119665,27 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRun" }, { - "$id": "8907", + "$id": "8929", "kind": "basic", "name": "cancelEvalRun", "accessibility": "public", "apiVersions": [], "doc": "Cancel a specific evaluation run by its ID.", "operation": { - "$id": "8908", + "$id": "8930", "name": "cancelEvalRun", "resourceName": "Evals", "doc": "Cancel a specific evaluation run by its ID.", "accessibility": "public", "parameters": [ { - "$id": "8909", + "$id": "8931", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8910", + "$id": "8932", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119245,13 +119703,13 @@ "crossLanguageDefinitionId": "OpenAI.Evals.cancelEvalRun.eval_id" }, { - "$id": "8911", + "$id": "8933", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run to cancel.", "type": { - "$id": "8912", + "$id": "8934", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119269,7 +119727,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.cancelEvalRun.run_id" }, { - "$id": "8913", + "$id": "8935", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -119311,13 +119769,13 @@ }, "parameters": [ { - "$id": "8914", + "$id": "8936", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8915", + "$id": "8937", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119333,13 +119791,13 @@ "decorators": [] }, { - "$id": "8916", + "$id": "8938", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run to 
cancel.", "type": { - "$id": "8917", + "$id": "8939", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119355,7 +119813,7 @@ "decorators": [] }, { - "$id": "8918", + "$id": "8940", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -119383,27 +119841,27 @@ "crossLanguageDefinitionId": "OpenAI.Evals.cancelEvalRun" }, { - "$id": "8919", + "$id": "8941", "kind": "basic", "name": "deleteEvalRun", "accessibility": "public", "apiVersions": [], "doc": "Delete a specific evaluation run by its ID.", "operation": { - "$id": "8920", + "$id": "8942", "name": "deleteEvalRun", "resourceName": "Evals", "doc": "Delete a specific evaluation run by its ID.", "accessibility": "public", "parameters": [ { - "$id": "8921", + "$id": "8943", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8922", + "$id": "8944", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119421,13 +119879,13 @@ "crossLanguageDefinitionId": "OpenAI.Evals.deleteEvalRun.eval_id" }, { - "$id": "8923", + "$id": "8945", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run to delete.", "type": { - "$id": "8924", + "$id": "8946", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119445,7 +119903,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.deleteEvalRun.run_id" }, { - "$id": "8925", + "$id": "8947", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -119487,13 +119945,13 @@ }, "parameters": [ { - "$id": "8926", + "$id": "8948", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8927", + "$id": "8949", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119509,13 +119967,13 @@ "decorators": [] }, { - "$id": "8928", + 
"$id": "8950", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run to delete.", "type": { - "$id": "8929", + "$id": "8951", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119531,7 +119989,7 @@ "decorators": [] }, { - "$id": "8930", + "$id": "8952", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -119559,27 +120017,27 @@ "crossLanguageDefinitionId": "OpenAI.Evals.deleteEvalRun" }, { - "$id": "8931", + "$id": "8953", "kind": "basic", "name": "getEvalRunOutputItems", "accessibility": "public", "apiVersions": [], "doc": "Get a list of output items for a specified evaluation run.", "operation": { - "$id": "8932", + "$id": "8954", "name": "getEvalRunOutputItems", "resourceName": "Evals", "doc": "Get a list of output items for a specified evaluation run.", "accessibility": "public", "parameters": [ { - "$id": "8933", + "$id": "8955", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8934", + "$id": "8956", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119597,13 +120055,13 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRunOutputItems.eval_id" }, { - "$id": "8935", + "$id": "8957", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run to retrieve output items for.", "type": { - "$id": "8936", + "$id": "8958", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119621,13 +120079,13 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRunOutputItems.run_id" }, { - "$id": "8937", + "$id": "8959", "kind": "query", "name": "after", "serializedName": "after", "doc": "Identifier for the last output item from the previous pagination request.", "type": { - "$id": "8938", + "$id": "8960", "kind": "string", "name": "string", "crossLanguageDefinitionId": 
"TypeSpec.string", @@ -119642,13 +120100,13 @@ "readOnly": false }, { - "$id": "8939", + "$id": "8961", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of output items to be returned in a single pagination response.", "type": { - "$id": "8940", + "$id": "8962", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -119663,7 +120121,7 @@ "readOnly": false }, { - "$id": "8941", + "$id": "8963", "kind": "query", "name": "status", "serializedName": "status", @@ -119680,7 +120138,7 @@ "readOnly": false }, { - "$id": "8942", + "$id": "8964", "kind": "query", "name": "order", "serializedName": "order", @@ -119697,7 +120155,7 @@ "readOnly": false }, { - "$id": "8943", + "$id": "8965", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -119739,13 +120197,13 @@ }, "parameters": [ { - "$id": "8944", + "$id": "8966", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8945", + "$id": "8967", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119761,13 +120219,13 @@ "decorators": [] }, { - "$id": "8946", + "$id": "8968", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run to retrieve output items for.", "type": { - "$id": "8947", + "$id": "8969", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119783,13 +120241,13 @@ "decorators": [] }, { - "$id": "8948", + "$id": "8970", "kind": "method", "name": "after", "serializedName": "after", "doc": "Identifier for the last output item from the previous pagination request.", "type": { - "$id": "8949", + "$id": "8971", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119805,13 +120263,13 @@ "decorators": [] }, { - "$id": "8950", + "$id": "8972", "kind": "method", "name": "limit", "serializedName": 
"limit", "doc": "A limit on the number of output items to be returned in a single pagination response.", "type": { - "$id": "8951", + "$id": "8973", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -119827,7 +120285,7 @@ "decorators": [] }, { - "$id": "8952", + "$id": "8974", "kind": "method", "name": "status", "serializedName": "status", @@ -119845,7 +120303,7 @@ "decorators": [] }, { - "$id": "8953", + "$id": "8975", "kind": "method", "name": "order", "serializedName": "order", @@ -119863,7 +120321,7 @@ "decorators": [] }, { - "$id": "8954", + "$id": "8976", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -119891,27 +120349,27 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRunOutputItems" }, { - "$id": "8955", + "$id": "8977", "kind": "basic", "name": "getEvalRunOutputItem", "accessibility": "public", "apiVersions": [], "doc": "Retrieve a specific output item from an evaluation run by its ID.", "operation": { - "$id": "8956", + "$id": "8978", "name": "getEvalRunOutputItem", "resourceName": "Evals", "doc": "Retrieve a specific output item from an evaluation run by its ID.", "accessibility": "public", "parameters": [ { - "$id": "8957", + "$id": "8979", "kind": "path", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8958", + "$id": "8980", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119929,13 +120387,13 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRunOutputItem.eval_id" }, { - "$id": "8959", + "$id": "8981", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run the output item belongs to.", "type": { - "$id": "8960", + "$id": "8982", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119953,13 +120411,13 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRunOutputItem.run_id" }, { - "$id": "8961", + 
"$id": "8983", "kind": "path", "name": "output_item_id", "serializedName": "output_item_id", "doc": "The ID of the output item to retrieve.", "type": { - "$id": "8962", + "$id": "8984", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -119977,7 +120435,7 @@ "crossLanguageDefinitionId": "OpenAI.Evals.getEvalRunOutputItem.output_item_id" }, { - "$id": "8963", + "$id": "8985", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -120019,13 +120477,13 @@ }, "parameters": [ { - "$id": "8964", + "$id": "8986", "kind": "method", "name": "eval_id", "serializedName": "eval_id", "doc": "The ID of the evaluation the run belongs to.", "type": { - "$id": "8965", + "$id": "8987", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120041,13 +120499,13 @@ "decorators": [] }, { - "$id": "8966", + "$id": "8988", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the evaluation run the output item belongs to.", "type": { - "$id": "8967", + "$id": "8989", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120063,13 +120521,13 @@ "decorators": [] }, { - "$id": "8968", + "$id": "8990", "kind": "method", "name": "output_item_id", "serializedName": "output_item_id", "doc": "The ID of the output item to retrieve.", "type": { - "$id": "8969", + "$id": "8991", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120085,7 +120543,7 @@ "decorators": [] }, { - "$id": "8970", + "$id": "8992", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -120115,13 +120573,13 @@ ], "parameters": [ { - "$id": "8971", + "$id": "8993", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "8972", + "$id": "8994", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -120132,7 +120590,7 @@ "isEndpoint": true, 
"defaultValue": { "type": { - "$id": "8973", + "$id": "8995", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -120150,31 +120608,31 @@ "crossLanguageDefinitionId": "OpenAI.Evals", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "8974", + "$id": "8996", "kind": "client", "name": "Responses", "namespace": "OpenAI", "methods": [ { - "$id": "8975", + "$id": "8997", "kind": "basic", "name": "createResponse", "accessibility": "public", "apiVersions": [], "doc": "Creates a model response.", "operation": { - "$id": "8976", + "$id": "8998", "name": "createResponse", "resourceName": "Responses", "doc": "Creates a model response.", "accessibility": "public", "parameters": [ { - "$id": "8977", + "$id": "8999", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -120190,7 +120648,7 @@ "crossLanguageDefinitionId": "OpenAI.Responses.createResponse.accept" }, { - "$id": "8978", + "$id": "9000", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -120207,7 +120665,7 @@ "crossLanguageDefinitionId": "OpenAI.Responses.createResponse.contentType" }, { - "$id": "8979", + "$id": "9001", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -120232,7 +120690,7 @@ 200 ], "bodyType": { - "$id": "8980", + "$id": "9002", "kind": "union", "name": "", "variantTypes": [ @@ -120276,7 +120734,7 @@ }, "parameters": [ { - "$id": "8981", + "$id": "9003", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -120293,7 +120751,7 @@ "decorators": [] }, { - "$id": "8982", + "$id": "9004", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -120310,7 +120768,7 @@ "decorators": [] }, { - "$id": "8983", + "$id": "9005", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -120330,7 +120788,7 @@ ], "response": { "type": { - "$ref": "8980" + "$ref": "9002" } }, "isOverride": false, @@ -120339,27 +120797,27 @@ 
"crossLanguageDefinitionId": "OpenAI.Responses.createResponse" }, { - "$id": "8984", + "$id": "9006", "kind": "basic", "name": "getResponse", "accessibility": "public", "apiVersions": [], "doc": "Retrieves a model response with the given ID.", "operation": { - "$id": "8985", + "$id": "9007", "name": "getResponse", "resourceName": "Responses", "doc": "Retrieves a model response with the given ID.", "accessibility": "public", "parameters": [ { - "$id": "8986", + "$id": "9008", "kind": "path", "name": "response_id", "serializedName": "response_id", "doc": "The ID of the response to retrieve.", "type": { - "$id": "8987", + "$id": "9009", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120377,7 +120835,7 @@ "crossLanguageDefinitionId": "OpenAI.Responses.getResponse.response_id" }, { - "$id": "8988", + "$id": "9010", "kind": "query", "name": "includables", "serializedName": "include[]", @@ -120393,13 +120851,13 @@ "readOnly": false }, { - "$id": "8989", + "$id": "9011", "kind": "query", "name": "stream", "serializedName": "stream", "doc": "If set to true, model response data will be streamed to the client as it is generated using server-sent events.", "type": { - "$id": "8990", + "$id": "9012", "kind": "boolean", "name": "boolean", "crossLanguageDefinitionId": "TypeSpec.boolean", @@ -120414,13 +120872,13 @@ "readOnly": false }, { - "$id": "8991", + "$id": "9013", "kind": "query", "name": "starting_after", "serializedName": "starting_after", "doc": "The sequence number of the event after which to start streaming.", "type": { - "$id": "8992", + "$id": "9014", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -120435,12 +120893,12 @@ "readOnly": false }, { - "$id": "8993", + "$id": "9015", "kind": "header", "name": "accept", "serializedName": "Accept", "type": { - "$id": "8994", + "$id": "9016", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120461,7 +120919,7 
@@ 200 ], "bodyType": { - "$id": "8995", + "$id": "9017", "kind": "union", "name": "", "variantTypes": [ @@ -120502,13 +120960,13 @@ }, "parameters": [ { - "$id": "8996", + "$id": "9018", "kind": "method", "name": "response_id", "serializedName": "response_id", "doc": "The ID of the response to retrieve.", "type": { - "$id": "8997", + "$id": "9019", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120524,7 +120982,7 @@ "decorators": [] }, { - "$id": "8998", + "$id": "9020", "kind": "method", "name": "includables", "serializedName": "include[]", @@ -120541,13 +120999,13 @@ "decorators": [] }, { - "$id": "8999", + "$id": "9021", "kind": "method", "name": "stream", "serializedName": "stream", "doc": "If set to true, model response data will be streamed to the client as it is generated using server-sent events.", "type": { - "$id": "9000", + "$id": "9022", "kind": "boolean", "name": "boolean", "crossLanguageDefinitionId": "TypeSpec.boolean", @@ -120563,13 +121021,13 @@ "decorators": [] }, { - "$id": "9001", + "$id": "9023", "kind": "method", "name": "starting_after", "serializedName": "starting_after", "doc": "The sequence number of the event after which to start streaming.", "type": { - "$id": "9002", + "$id": "9024", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -120585,12 +121043,12 @@ "decorators": [] }, { - "$id": "9003", + "$id": "9025", "kind": "method", "name": "accept", "serializedName": "Accept", "type": { - "$ref": "8994" + "$ref": "9016" }, "location": "Header", "isApiVersion": false, @@ -120604,7 +121062,7 @@ ], "response": { "type": { - "$ref": "8995" + "$ref": "9017" } }, "isOverride": false, @@ -120613,25 +121071,25 @@ "crossLanguageDefinitionId": "OpenAI.Responses.getResponse" }, { - "$id": "9004", + "$id": "9026", "kind": "basic", "name": "deleteResponse", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "9005", + "$id": "9027", "name": "deleteResponse", 
"resourceName": "Responses", "accessibility": "public", "parameters": [ { - "$id": "9006", + "$id": "9028", "kind": "path", "name": "response_id", "serializedName": "response_id", "doc": "The ID of the response to delete.", "type": { - "$id": "9007", + "$id": "9029", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120649,7 +121107,7 @@ "crossLanguageDefinitionId": "OpenAI.Responses.deleteResponse.response_id" }, { - "$id": "9008", + "$id": "9030", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -120691,13 +121149,13 @@ }, "parameters": [ { - "$id": "9009", + "$id": "9031", "kind": "method", "name": "response_id", "serializedName": "response_id", "doc": "The ID of the response to delete.", "type": { - "$id": "9010", + "$id": "9032", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120713,7 +121171,7 @@ "decorators": [] }, { - "$id": "9011", + "$id": "9033", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -120741,25 +121199,25 @@ "crossLanguageDefinitionId": "OpenAI.Responses.deleteResponse" }, { - "$id": "9012", + "$id": "9034", "kind": "basic", "name": "cancelResponse", "accessibility": "public", "apiVersions": [], "operation": { - "$id": "9013", + "$id": "9035", "name": "cancelResponse", "resourceName": "Responses", "accessibility": "public", "parameters": [ { - "$id": "9014", + "$id": "9036", "kind": "path", "name": "response_id", "serializedName": "response_id", "doc": "The ID of the response to cancel.", "type": { - "$id": "9015", + "$id": "9037", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120777,7 +121235,7 @@ "crossLanguageDefinitionId": "OpenAI.Responses.cancelResponse.response_id" }, { - "$id": "9016", + "$id": "9038", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -120819,13 +121277,13 @@ }, "parameters": [ { - "$id": "9017", + "$id": "9039", "kind": "method", "name": 
"response_id", "serializedName": "response_id", "doc": "The ID of the response to cancel.", "type": { - "$id": "9018", + "$id": "9040", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120841,7 +121299,7 @@ "decorators": [] }, { - "$id": "9019", + "$id": "9041", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -120869,27 +121327,27 @@ "crossLanguageDefinitionId": "OpenAI.Responses.cancelResponse" }, { - "$id": "9020", + "$id": "9042", "kind": "paging", "name": "GetResponseInputItems", "accessibility": "public", "apiVersions": [], "doc": "Returns a list of input items for a given response.", "operation": { - "$id": "9021", + "$id": "9043", "name": "GetResponseInputItems", "resourceName": "Responses", "doc": "Returns a list of input items for a given response.", "accessibility": "public", "parameters": [ { - "$id": "9022", + "$id": "9044", "kind": "path", "name": "response_id", "serializedName": "response_id", "doc": "The ID of the response to retrieve.", "type": { - "$id": "9023", + "$id": "9045", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120907,13 +121365,13 @@ "crossLanguageDefinitionId": "OpenAI.Responses.listInputItems.response_id" }, { - "$id": "9024", + "$id": "9046", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9025", + "$id": "9047", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -120928,7 +121386,7 @@ "readOnly": false }, { - "$id": "9026", + "$id": "9048", "kind": "query", "name": "order", "serializedName": "order", @@ -120945,13 +121403,13 @@ "readOnly": false }, { - "$id": "9027", + "$id": "9049", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9028", + "$id": "9050", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120966,13 +121424,13 @@ "readOnly": false }, { - "$id": "9029", + "$id": "9051", "kind": "query", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9030", + "$id": "9052", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -120987,7 +121445,7 @@ "readOnly": false }, { - "$id": "9031", + "$id": "9053", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -121029,13 +121487,13 @@ }, "parameters": [ { - "$id": "9032", + "$id": "9054", "kind": "method", "name": "response_id", "serializedName": "response_id", "doc": "The ID of the response to retrieve.", "type": { - "$id": "9033", + "$id": "9055", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -121051,13 +121509,13 @@ "decorators": [] }, { - "$id": "9034", + "$id": "9056", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9035", + "$id": "9057", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -121073,7 +121531,7 @@ "decorators": [] }, { - "$id": "9036", + "$id": "9058", "kind": "method", "name": "order", "serializedName": "order", @@ -121091,13 +121549,13 @@ "decorators": [] }, { - "$id": "9037", + "$id": "9059", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9038", + "$id": "9060", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -121113,13 +121571,13 @@ "decorators": [] }, { - "$id": "9039", + "$id": "9061", "kind": "method", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9040", + "$id": "9062", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -121135,7 +121593,7 @@ "decorators": [] }, { - "$id": "9041", + "$id": "9063", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -121170,7 +121628,7 @@ ], "continuationToken": { "parameter": { - "$ref": "9027" + "$ref": "9049" }, "responseSegments": [ "last_id" @@ -121182,13 +121640,13 @@ ], "parameters": [ { - "$id": "9042", + "$id": "9064", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9043", + "$id": "9065", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -121199,7 +121657,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9044", + "$id": "9066", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -121217,31 +121675,31 @@ "crossLanguageDefinitionId": "OpenAI.Responses", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9045", + "$id": "9067", "kind": "client", "name": "Images", "namespace": "OpenAI", "methods": [ { - "$id": "9046", + "$id": "9068", "kind": "basic", "name": "GenerateImages", "accessibility": "public", "apiVersions": [], "summary": "Creates an image given a prompt", "operation": { - "$id": "9047", + "$id": "9069", "name": "GenerateImages", "resourceName": "Images", "summary": "Creates an image given a prompt", "accessibility": "public", "parameters": [ { - "$id": "9048", + "$id": "9070", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -121257,7 +121715,7 @@ "crossLanguageDefinitionId": "OpenAI.Images.createImage.accept" }, { - "$id": "9049", + "$id": "9071", 
"kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -121274,7 +121732,7 @@ "crossLanguageDefinitionId": "OpenAI.Images.createImage.contentType" }, { - "$id": "9050", + "$id": "9072", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -121322,7 +121780,7 @@ }, "parameters": [ { - "$id": "9051", + "$id": "9073", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -121339,7 +121797,7 @@ "decorators": [] }, { - "$id": "9052", + "$id": "9074", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -121356,7 +121814,7 @@ "decorators": [] }, { - "$id": "9053", + "$id": "9075", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -121385,21 +121843,21 @@ "crossLanguageDefinitionId": "OpenAI.Images.createImage" }, { - "$id": "9054", + "$id": "9076", "kind": "basic", "name": "GenerateImageEdits", "accessibility": "public", "apiVersions": [], "summary": "Creates an edited or extended image given an original image and a prompt.", "operation": { - "$id": "9055", + "$id": "9077", "name": "GenerateImageEdits", "resourceName": "Images", "summary": "Creates an edited or extended image given an original image and a prompt.", "accessibility": "public", "parameters": [ { - "$id": "9056", + "$id": "9078", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -121415,7 +121873,7 @@ "crossLanguageDefinitionId": "OpenAI.Images.createImageEdit.accept" }, { - "$id": "9057", + "$id": "9079", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -121431,7 +121889,7 @@ "crossLanguageDefinitionId": "OpenAI.Images.createImageEdit.contentType" }, { - "$id": "9058", + "$id": "9080", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -121479,7 +121937,7 @@ }, "parameters": [ { - "$id": "9059", + "$id": "9081", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -121496,7 +121954,7 @@ "decorators": [] }, { - "$id": 
"9060", + "$id": "9082", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -121513,7 +121971,7 @@ "decorators": [] }, { - "$id": "9061", + "$id": "9083", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -121541,21 +121999,21 @@ "crossLanguageDefinitionId": "OpenAI.Images.createImageEdit" }, { - "$id": "9062", + "$id": "9084", "kind": "basic", "name": "GenerateImageVariations", "accessibility": "public", "apiVersions": [], "summary": "Creates an edited or extended image given an original image and a prompt.", "operation": { - "$id": "9063", + "$id": "9085", "name": "GenerateImageVariations", "resourceName": "Images", "summary": "Creates an edited or extended image given an original image and a prompt.", "accessibility": "public", "parameters": [ { - "$id": "9064", + "$id": "9086", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -121571,7 +122029,7 @@ "crossLanguageDefinitionId": "OpenAI.Images.createImageVariation.accept" }, { - "$id": "9065", + "$id": "9087", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -121587,7 +122045,7 @@ "crossLanguageDefinitionId": "OpenAI.Images.createImageVariation.contentType" }, { - "$id": "9066", + "$id": "9088", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -121635,7 +122093,7 @@ }, "parameters": [ { - "$id": "9067", + "$id": "9089", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -121652,7 +122110,7 @@ "decorators": [] }, { - "$id": "9068", + "$id": "9090", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -121669,7 +122127,7 @@ "decorators": [] }, { - "$id": "9069", + "$id": "9091", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -121699,13 +122157,13 @@ ], "parameters": [ { - "$id": "9070", + "$id": "9092", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9071", 
+ "$id": "9093", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -121716,7 +122174,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9072", + "$id": "9094", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -121734,31 +122192,31 @@ "crossLanguageDefinitionId": "OpenAI.Images", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9073", + "$id": "9095", "kind": "client", "name": "Messages", "namespace": "OpenAI", "methods": [ { - "$id": "9074", + "$id": "9096", "kind": "basic", "name": "createMessage", "accessibility": "public", "apiVersions": [], "summary": "Create a message.", "operation": { - "$id": "9075", + "$id": "9097", "name": "createMessage", "resourceName": "Messages", "summary": "Create a message.", "accessibility": "public", "parameters": [ { - "$id": "9076", + "$id": "9098", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -121774,7 +122232,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.createMessage.accept" }, { - "$id": "9077", + "$id": "9099", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -121790,13 +122248,13 @@ "crossLanguageDefinitionId": "OpenAI.Messages.createMessage.openAIBeta" }, { - "$id": "9078", + "$id": "9100", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) to create a message for.", "type": { - "$id": "9079", + "$id": "9101", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -121814,7 +122272,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.createMessage.thread_id" }, { - "$id": "9080", + "$id": "9102", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -121831,7 +122289,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.createMessage.contentType" }, { - "$id": "9081", + "$id": "9103", "kind": "body", "name": "requestBody", 
"serializedName": "requestBody", @@ -121879,7 +122337,7 @@ }, "parameters": [ { - "$id": "9082", + "$id": "9104", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -121896,7 +122354,7 @@ "decorators": [] }, { - "$id": "9083", + "$id": "9105", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -121913,13 +122371,13 @@ "decorators": [] }, { - "$id": "9084", + "$id": "9106", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) to create a message for.", "type": { - "$id": "9085", + "$id": "9107", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -121935,7 +122393,7 @@ "decorators": [] }, { - "$id": "9086", + "$id": "9108", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -121952,7 +122410,7 @@ "decorators": [] }, { - "$id": "9087", + "$id": "9109", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -121981,21 +122439,21 @@ "crossLanguageDefinitionId": "OpenAI.Messages.createMessage" }, { - "$id": "9088", + "$id": "9110", "kind": "paging", "name": "listMessages", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of messages for a given thread.", "operation": { - "$id": "9089", + "$id": "9111", "name": "listMessages", "resourceName": "Messages", "summary": "Returns a list of messages for a given thread.", "accessibility": "public", "parameters": [ { - "$id": "9090", + "$id": "9112", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -122011,7 +122469,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.listMessages.accept" }, { - "$id": "9091", + "$id": "9113", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -122027,13 +122485,13 @@ "crossLanguageDefinitionId": "OpenAI.Messages.listMessages.openAIBeta" }, { - "$id": "9092", + "$id": "9114", "kind": "path", "name": "thread_id", "serializedName": 
"thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) the messages belong to.", "type": { - "$id": "9093", + "$id": "9115", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122051,13 +122509,13 @@ "crossLanguageDefinitionId": "OpenAI.Messages.listMessages.thread_id" }, { - "$id": "9094", + "$id": "9116", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9095", + "$id": "9117", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -122072,7 +122530,7 @@ "readOnly": false }, { - "$id": "9096", + "$id": "9118", "kind": "query", "name": "order", "serializedName": "order", @@ -122089,13 +122547,13 @@ "readOnly": false }, { - "$id": "9097", + "$id": "9119", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9098", + "$id": "9120", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122110,13 +122568,13 @@ "readOnly": false }, { - "$id": "9099", + "$id": "9121", "kind": "query", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9100", + "$id": "9122", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122157,7 +122615,7 @@ }, "parameters": [ { - "$id": "9101", + "$id": "9123", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -122174,7 +122632,7 @@ "decorators": [] }, { - "$id": "9102", + "$id": "9124", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -122191,13 +122649,13 @@ "decorators": [] }, { - "$id": "9103", + "$id": "9125", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) the messages belong to.", "type": { - "$id": "9104", + "$id": "9126", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122213,13 +122671,13 @@ "decorators": [] }, { - "$id": "9105", + "$id": "9127", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9106", + "$id": "9128", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -122235,7 +122693,7 @@ "decorators": [] }, { - "$id": "9107", + "$id": "9129", "kind": "method", "name": "order", "serializedName": "order", @@ -122253,13 +122711,13 @@ "decorators": [] }, { - "$id": "9108", + "$id": "9130", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9109", + "$id": "9131", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122275,13 +122733,13 @@ "decorators": [] }, { - "$id": "9110", + "$id": "9132", "kind": "method", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9111", + "$id": "9133", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122315,7 +122773,7 @@ ], "continuationToken": { "parameter": { - "$ref": "9097" + "$ref": "9119" }, "responseSegments": [ "last_id" @@ -122325,21 +122783,21 @@ } }, { - "$id": "9112", + "$id": "9134", "kind": "basic", "name": "getMessage", "accessibility": "public", "apiVersions": [], "summary": "Retrieve a message.", "operation": { - "$id": "9113", + "$id": "9135", "name": "getMessage", "resourceName": "Messages", "summary": "Retrieve a message.", "accessibility": "public", "parameters": [ { - "$id": "9114", + "$id": "9136", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -122355,7 +122813,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.getMessage.accept" }, { - "$id": "9115", + "$id": "9137", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -122371,13 +122829,13 @@ "crossLanguageDefinitionId": "OpenAI.Messages.getMessage.openAIBeta" }, { - "$id": "9116", + "$id": "9138", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the 
[thread](/docs/api-reference/threads) to which this message belongs.", "type": { - "$id": "9117", + "$id": "9139", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122395,13 +122853,13 @@ "crossLanguageDefinitionId": "OpenAI.Messages.getMessage.thread_id" }, { - "$id": "9118", + "$id": "9140", "kind": "path", "name": "message_id", "serializedName": "message_id", "doc": "The ID of the message to retrieve.", "type": { - "$id": "9119", + "$id": "9141", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122445,7 +122903,7 @@ }, "parameters": [ { - "$id": "9120", + "$id": "9142", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -122462,7 +122920,7 @@ "decorators": [] }, { - "$id": "9121", + "$id": "9143", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -122479,13 +122937,13 @@ "decorators": [] }, { - "$id": "9122", + "$id": "9144", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) to which this message belongs.", "type": { - "$id": "9123", + "$id": "9145", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122501,13 +122959,13 @@ "decorators": [] }, { - "$id": "9124", + "$id": "9146", "kind": "method", "name": "message_id", "serializedName": "message_id", "doc": "The ID of the message to retrieve.", "type": { - "$id": "9125", + "$id": "9147", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122534,21 +122992,21 @@ "crossLanguageDefinitionId": "OpenAI.Messages.getMessage" }, { - "$id": "9126", + "$id": "9148", "kind": "basic", "name": "modifyMessage", "accessibility": "public", "apiVersions": [], "summary": "Modifies a message.", "operation": { - "$id": "9127", + "$id": "9149", "name": "modifyMessage", "resourceName": "Messages", "summary": "Modifies a message.", "accessibility": "public", 
"parameters": [ { - "$id": "9128", + "$id": "9150", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -122564,7 +123022,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.modifyMessage.accept" }, { - "$id": "9129", + "$id": "9151", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -122580,13 +123038,13 @@ "crossLanguageDefinitionId": "OpenAI.Messages.modifyMessage.openAIBeta" }, { - "$id": "9130", + "$id": "9152", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to which this message belongs.", "type": { - "$id": "9131", + "$id": "9153", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122604,13 +123062,13 @@ "crossLanguageDefinitionId": "OpenAI.Messages.modifyMessage.thread_id" }, { - "$id": "9132", + "$id": "9154", "kind": "path", "name": "message_id", "serializedName": "message_id", "doc": "The ID of the message to modify.", "type": { - "$id": "9133", + "$id": "9155", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122628,7 +123086,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.modifyMessage.message_id" }, { - "$id": "9134", + "$id": "9156", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -122645,7 +123103,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.modifyMessage.contentType" }, { - "$id": "9135", + "$id": "9157", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -122693,7 +123151,7 @@ }, "parameters": [ { - "$id": "9136", + "$id": "9158", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -122710,7 +123168,7 @@ "decorators": [] }, { - "$id": "9137", + "$id": "9159", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -122727,13 +123185,13 @@ "decorators": [] }, { - "$id": "9138", + "$id": "9160", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the 
thread to which this message belongs.", "type": { - "$id": "9139", + "$id": "9161", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122749,13 +123207,13 @@ "decorators": [] }, { - "$id": "9140", + "$id": "9162", "kind": "method", "name": "message_id", "serializedName": "message_id", "doc": "The ID of the message to modify.", "type": { - "$id": "9141", + "$id": "9163", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122771,7 +123229,7 @@ "decorators": [] }, { - "$id": "9142", + "$id": "9164", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -122788,7 +123246,7 @@ "decorators": [] }, { - "$id": "9143", + "$id": "9165", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -122817,21 +123275,21 @@ "crossLanguageDefinitionId": "OpenAI.Messages.modifyMessage" }, { - "$id": "9144", + "$id": "9166", "kind": "basic", "name": "deleteMessage", "accessibility": "public", "apiVersions": [], "summary": "Deletes a message.", "operation": { - "$id": "9145", + "$id": "9167", "name": "deleteMessage", "resourceName": "Messages", "summary": "Deletes a message.", "accessibility": "public", "parameters": [ { - "$id": "9146", + "$id": "9168", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -122847,7 +123305,7 @@ "crossLanguageDefinitionId": "OpenAI.Messages.deleteMessage.accept" }, { - "$id": "9147", + "$id": "9169", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -122863,13 +123321,13 @@ "crossLanguageDefinitionId": "OpenAI.Messages.deleteMessage.openAIBeta" }, { - "$id": "9148", + "$id": "9170", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to which this message belongs.", "type": { - "$id": "9149", + "$id": "9171", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122887,13 +123345,13 @@ "crossLanguageDefinitionId": 
"OpenAI.Messages.deleteMessage.thread_id" }, { - "$id": "9150", + "$id": "9172", "kind": "path", "name": "message_id", "serializedName": "message_id", "doc": "The ID of the message to delete.", "type": { - "$id": "9151", + "$id": "9173", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122937,7 +123395,7 @@ }, "parameters": [ { - "$id": "9152", + "$id": "9174", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -122954,7 +123412,7 @@ "decorators": [] }, { - "$id": "9153", + "$id": "9175", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -122971,13 +123429,13 @@ "decorators": [] }, { - "$id": "9154", + "$id": "9176", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to which this message belongs.", "type": { - "$id": "9155", + "$id": "9177", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -122993,13 +123451,13 @@ "decorators": [] }, { - "$id": "9156", + "$id": "9178", "kind": "method", "name": "message_id", "serializedName": "message_id", "doc": "The ID of the message to delete.", "type": { - "$id": "9157", + "$id": "9179", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -123028,13 +123486,13 @@ ], "parameters": [ { - "$id": "9158", + "$id": "9180", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9159", + "$id": "9181", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -123045,7 +123503,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9160", + "$id": "9182", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -123063,31 +123521,31 @@ "crossLanguageDefinitionId": "OpenAI.Messages", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9161", + "$id": "9183", "kind": "client", "name": 
"Moderations", "namespace": "OpenAI", "methods": [ { - "$id": "9162", + "$id": "9184", "kind": "basic", "name": "ClassifyText", "accessibility": "public", "apiVersions": [], "summary": "Classifies if text is potentially harmful.", "operation": { - "$id": "9163", + "$id": "9185", "name": "ClassifyText", "resourceName": "Moderations", "summary": "Classifies if text is potentially harmful.", "accessibility": "public", "parameters": [ { - "$id": "9164", + "$id": "9186", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -123103,7 +123561,7 @@ "crossLanguageDefinitionId": "OpenAI.Moderations.createModeration.accept" }, { - "$id": "9165", + "$id": "9187", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -123120,7 +123578,7 @@ "crossLanguageDefinitionId": "OpenAI.Moderations.createModeration.contentType" }, { - "$id": "9166", + "$id": "9188", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -123168,7 +123626,7 @@ }, "parameters": [ { - "$id": "9167", + "$id": "9189", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -123185,7 +123643,7 @@ "decorators": [] }, { - "$id": "9168", + "$id": "9190", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -123202,7 +123660,7 @@ "decorators": [] }, { - "$id": "9169", + "$id": "9191", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -123233,13 +123691,13 @@ ], "parameters": [ { - "$id": "9170", + "$id": "9192", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9171", + "$id": "9193", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -123250,7 +123708,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9172", + "$id": "9194", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -123268,31 +123726,31 @@ "crossLanguageDefinitionId": "OpenAI.Moderations", 
"apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9173", + "$id": "9195", "kind": "client", "name": "Runs", "namespace": "OpenAI", "methods": [ { - "$id": "9174", + "$id": "9196", "kind": "basic", "name": "createThreadAndRun", "accessibility": "public", "apiVersions": [], "summary": "Create a thread and run it in one request.", "operation": { - "$id": "9175", + "$id": "9197", "name": "createThreadAndRun", "resourceName": "Runs", "summary": "Create a thread and run it in one request.", "accessibility": "public", "parameters": [ { - "$id": "9176", + "$id": "9198", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -123308,7 +123766,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createThreadAndRun.accept" }, { - "$id": "9177", + "$id": "9199", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -123324,7 +123782,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createThreadAndRun.openAIBeta" }, { - "$id": "9178", + "$id": "9200", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -123341,7 +123799,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createThreadAndRun.contentType" }, { - "$id": "9179", + "$id": "9201", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -123389,7 +123847,7 @@ }, "parameters": [ { - "$id": "9180", + "$id": "9202", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -123406,7 +123864,7 @@ "decorators": [] }, { - "$id": "9181", + "$id": "9203", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -123423,7 +123881,7 @@ "decorators": [] }, { - "$id": "9182", + "$id": "9204", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -123440,7 +123898,7 @@ "decorators": [] }, { - "$id": "9183", + "$id": "9205", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -123469,21 +123927,21 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createThreadAndRun" 
}, { - "$id": "9184", + "$id": "9206", "kind": "basic", "name": "createRun", "accessibility": "public", "apiVersions": [], "summary": "Create a run.", "operation": { - "$id": "9185", + "$id": "9207", "name": "createRun", "resourceName": "Runs", "summary": "Create a run.", "accessibility": "public", "parameters": [ { - "$id": "9186", + "$id": "9208", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -123499,7 +123957,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createRun.accept" }, { - "$id": "9187", + "$id": "9209", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -123515,13 +123973,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createRun.openAIBeta" }, { - "$id": "9188", + "$id": "9210", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to run.", "type": { - "$id": "9189", + "$id": "9211", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -123539,13 +123997,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createRun.thread_id" }, { - "$id": "9190", + "$id": "9212", "kind": "query", "name": "include[]", "serializedName": "include[]", "doc": "A list of additional fields to include in the response. 
Currently the only supported value is\n`step_details.tool_calls[*].file_search.results[*].content` to fetch the file search result\ncontent.\n\nSee the\n[file search tool documentation](/docs/assistants/tools/file-search/customizing-file-search-settings)\nfor more information.", "type": { - "$id": "9191", + "$id": "9213", "kind": "array", "name": "ArrayIncludedRunStepProperty", "valueType": { @@ -123564,7 +124022,7 @@ "readOnly": false }, { - "$id": "9192", + "$id": "9214", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -123581,7 +124039,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createRun.contentType" }, { - "$id": "9193", + "$id": "9215", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -123629,7 +124087,7 @@ }, "parameters": [ { - "$id": "9194", + "$id": "9216", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -123646,7 +124104,7 @@ "decorators": [] }, { - "$id": "9195", + "$id": "9217", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -123663,13 +124121,13 @@ "decorators": [] }, { - "$id": "9196", + "$id": "9218", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to run.", "type": { - "$id": "9197", + "$id": "9219", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -123685,13 +124143,13 @@ "decorators": [] }, { - "$id": "9198", + "$id": "9220", "kind": "method", "name": "include[]", "serializedName": "include[]", "doc": "A list of additional fields to include in the response. 
Currently the only supported value is\n`step_details.tool_calls[*].file_search.results[*].content` to fetch the file search result\ncontent.\n\nSee the\n[file search tool documentation](/docs/assistants/tools/file-search/customizing-file-search-settings)\nfor more information.", "type": { - "$ref": "9191" + "$ref": "9213" }, "location": "Query", "isApiVersion": false, @@ -123703,7 +124161,7 @@ "decorators": [] }, { - "$id": "9199", + "$id": "9221", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -123720,7 +124178,7 @@ "decorators": [] }, { - "$id": "9200", + "$id": "9222", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -123749,21 +124207,21 @@ "crossLanguageDefinitionId": "OpenAI.Runs.createRun" }, { - "$id": "9201", + "$id": "9223", "kind": "paging", "name": "listRuns", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of runs belonging to a thread.", "operation": { - "$id": "9202", + "$id": "9224", "name": "listRuns", "resourceName": "Runs", "summary": "Returns a list of runs belonging to a thread.", "accessibility": "public", "parameters": [ { - "$id": "9203", + "$id": "9225", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -123779,7 +124237,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.listRuns.accept" }, { - "$id": "9204", + "$id": "9226", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -123795,13 +124253,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.listRuns.openAIBeta" }, { - "$id": "9205", + "$id": "9227", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread the run belongs to.", "type": { - "$id": "9206", + "$id": "9228", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -123819,13 +124277,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.listRuns.thread_id" }, { - "$id": "9207", + "$id": "9229", "kind": "query", "name": "limit", "serializedName": 
"limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9208", + "$id": "9230", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -123840,7 +124298,7 @@ "readOnly": false }, { - "$id": "9209", + "$id": "9231", "kind": "query", "name": "order", "serializedName": "order", @@ -123857,13 +124315,13 @@ "readOnly": false }, { - "$id": "9210", + "$id": "9232", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9211", + "$id": "9233", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -123878,13 +124336,13 @@ "readOnly": false }, { - "$id": "9212", + "$id": "9234", "kind": "query", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9213", + "$id": "9235", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -123925,7 +124383,7 @@ }, "parameters": [ { - "$id": "9214", + "$id": "9236", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -123942,7 +124400,7 @@ "decorators": [] }, { - "$id": "9215", + "$id": "9237", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -123959,13 +124417,13 @@ "decorators": [] }, { - "$id": "9216", + "$id": "9238", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread the run belongs to.", "type": { - "$id": "9217", + "$id": "9239", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -123981,13 +124439,13 @@ "decorators": [] }, { - "$id": "9218", + "$id": "9240", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9219", + "$id": "9241", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -124003,7 +124461,7 @@ "decorators": [] }, { - "$id": "9220", + "$id": "9242", "kind": "method", "name": "order", "serializedName": "order", @@ -124021,13 +124479,13 @@ "decorators": [] }, { - "$id": "9221", + "$id": "9243", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9222", + "$id": "9244", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124043,13 +124501,13 @@ "decorators": [] }, { - "$id": "9223", + "$id": "9245", "kind": "method", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9224", + "$id": "9246", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124083,7 +124541,7 @@ ], "continuationToken": { "parameter": { - "$ref": "9210" + "$ref": "9232" }, "responseSegments": [ "last_id" @@ -124093,21 +124551,21 @@ } }, { - "$id": "9225", + "$id": "9247", "kind": "basic", "name": "getRun", "accessibility": "public", "apiVersions": [], "summary": "Retrieves a run.", "operation": { - "$id": "9226", + "$id": "9248", "name": "getRun", "resourceName": "Runs", "summary": "Retrieves a run.", "accessibility": "public", "parameters": [ { - "$id": "9227", + "$id": "9249", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -124123,7 +124581,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.getRun.accept" }, { - "$id": "9228", + "$id": "9250", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -124139,13 +124597,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.getRun.openAIBeta" }, { - "$id": "9229", + "$id": "9251", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) that was run.", "type": { - 
"$id": "9230", + "$id": "9252", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124163,13 +124621,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.getRun.thread_id" }, { - "$id": "9231", + "$id": "9253", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run to retrieve.", "type": { - "$id": "9232", + "$id": "9254", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124213,7 +124671,7 @@ }, "parameters": [ { - "$id": "9233", + "$id": "9255", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -124230,7 +124688,7 @@ "decorators": [] }, { - "$id": "9234", + "$id": "9256", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -124247,13 +124705,13 @@ "decorators": [] }, { - "$id": "9235", + "$id": "9257", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) that was run.", "type": { - "$id": "9236", + "$id": "9258", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124269,13 +124727,13 @@ "decorators": [] }, { - "$id": "9237", + "$id": "9259", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run to retrieve.", "type": { - "$id": "9238", + "$id": "9260", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124302,21 +124760,21 @@ "crossLanguageDefinitionId": "OpenAI.Runs.getRun" }, { - "$id": "9239", + "$id": "9261", "kind": "basic", "name": "modifyRun", "accessibility": "public", "apiVersions": [], "summary": "Modifies a run.", "operation": { - "$id": "9240", + "$id": "9262", "name": "modifyRun", "resourceName": "Runs", "summary": "Modifies a run.", "accessibility": "public", "parameters": [ { - "$id": "9241", + "$id": "9263", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -124332,7 +124790,7 @@ 
"crossLanguageDefinitionId": "OpenAI.Runs.modifyRun.accept" }, { - "$id": "9242", + "$id": "9264", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -124348,13 +124806,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.modifyRun.openAIBeta" }, { - "$id": "9243", + "$id": "9265", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) that was run.", "type": { - "$id": "9244", + "$id": "9266", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124372,13 +124830,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.modifyRun.thread_id" }, { - "$id": "9245", + "$id": "9267", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run to modify.", "type": { - "$id": "9246", + "$id": "9268", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124396,7 +124854,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.modifyRun.run_id" }, { - "$id": "9247", + "$id": "9269", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -124413,7 +124871,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.modifyRun.contentType" }, { - "$id": "9248", + "$id": "9270", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -124461,7 +124919,7 @@ }, "parameters": [ { - "$id": "9249", + "$id": "9271", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -124478,7 +124936,7 @@ "decorators": [] }, { - "$id": "9250", + "$id": "9272", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -124495,13 +124953,13 @@ "decorators": [] }, { - "$id": "9251", + "$id": "9273", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) that was run.", "type": { - "$id": "9252", + "$id": "9274", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ 
-124517,13 +124975,13 @@ "decorators": [] }, { - "$id": "9253", + "$id": "9275", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run to modify.", "type": { - "$id": "9254", + "$id": "9276", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124539,7 +124997,7 @@ "decorators": [] }, { - "$id": "9255", + "$id": "9277", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -124556,7 +125014,7 @@ "decorators": [] }, { - "$id": "9256", + "$id": "9278", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -124585,21 +125043,21 @@ "crossLanguageDefinitionId": "OpenAI.Runs.modifyRun" }, { - "$id": "9257", + "$id": "9279", "kind": "basic", "name": "cancelRun", "accessibility": "public", "apiVersions": [], "summary": "Cancels a run that is `in_progress`.", "operation": { - "$id": "9258", + "$id": "9280", "name": "cancelRun", "resourceName": "Runs", "summary": "Cancels a run that is `in_progress`.", "accessibility": "public", "parameters": [ { - "$id": "9259", + "$id": "9281", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -124615,7 +125073,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.cancelRun.accept" }, { - "$id": "9260", + "$id": "9282", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -124631,13 +125089,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.cancelRun.openAIBeta" }, { - "$id": "9261", + "$id": "9283", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to which this run belongs.", "type": { - "$id": "9262", + "$id": "9284", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124655,13 +125113,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.cancelRun.thread_id" }, { - "$id": "9263", + "$id": "9285", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run to cancel.", "type": { - 
"$id": "9264", + "$id": "9286", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124705,7 +125163,7 @@ }, "parameters": [ { - "$id": "9265", + "$id": "9287", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -124722,7 +125180,7 @@ "decorators": [] }, { - "$id": "9266", + "$id": "9288", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -124739,13 +125197,13 @@ "decorators": [] }, { - "$id": "9267", + "$id": "9289", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to which this run belongs.", "type": { - "$id": "9268", + "$id": "9290", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124761,13 +125219,13 @@ "decorators": [] }, { - "$id": "9269", + "$id": "9291", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run to cancel.", "type": { - "$id": "9270", + "$id": "9292", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124794,21 +125252,21 @@ "crossLanguageDefinitionId": "OpenAI.Runs.cancelRun" }, { - "$id": "9271", + "$id": "9293", "kind": "basic", "name": "submitToolOutputsToRun", "accessibility": "public", "apiVersions": [], "summary": "When a run has the `status: \"requires_action\"` and `required_action.type` is\n`submit_tool_outputs`, this endpoint can be used to submit the outputs from the tool calls once\nthey're all completed. All outputs must be submitted in a single request.", "operation": { - "$id": "9272", + "$id": "9294", "name": "submitToolOutputsToRun", "resourceName": "Runs", "summary": "When a run has the `status: \"requires_action\"` and `required_action.type` is\n`submit_tool_outputs`, this endpoint can be used to submit the outputs from the tool calls once\nthey're all completed. 
All outputs must be submitted in a single request.", "accessibility": "public", "parameters": [ { - "$id": "9273", + "$id": "9295", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -124824,7 +125282,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.submitToolOutputsToRun.accept" }, { - "$id": "9274", + "$id": "9296", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -124840,13 +125298,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.submitToolOutputsToRun.openAIBeta" }, { - "$id": "9275", + "$id": "9297", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) to which this run belongs.", "type": { - "$id": "9276", + "$id": "9298", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124864,13 +125322,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.submitToolOutputsToRun.thread_id" }, { - "$id": "9277", + "$id": "9299", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run that requires the tool output submission.", "type": { - "$id": "9278", + "$id": "9300", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -124888,7 +125346,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.submitToolOutputsToRun.run_id" }, { - "$id": "9279", + "$id": "9301", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -124905,7 +125363,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.submitToolOutputsToRun.contentType" }, { - "$id": "9280", + "$id": "9302", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -124953,7 +125411,7 @@ }, "parameters": [ { - "$id": "9281", + "$id": "9303", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -124970,7 +125428,7 @@ "decorators": [] }, { - "$id": "9282", + "$id": "9304", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -124987,13 +125445,13 @@ 
"decorators": [] }, { - "$id": "9283", + "$id": "9305", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the [thread](/docs/api-reference/threads) to which this run belongs.", "type": { - "$id": "9284", + "$id": "9306", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125009,13 +125467,13 @@ "decorators": [] }, { - "$id": "9285", + "$id": "9307", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run that requires the tool output submission.", "type": { - "$id": "9286", + "$id": "9308", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125031,7 +125489,7 @@ "decorators": [] }, { - "$id": "9287", + "$id": "9309", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -125048,7 +125506,7 @@ "decorators": [] }, { - "$id": "9288", + "$id": "9310", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -125077,21 +125535,21 @@ "crossLanguageDefinitionId": "OpenAI.Runs.submitToolOutputsToRun" }, { - "$id": "9289", + "$id": "9311", "kind": "paging", "name": "listRunSteps", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of run steps belonging to a run.", "operation": { - "$id": "9290", + "$id": "9312", "name": "listRunSteps", "resourceName": "Runs", "summary": "Returns a list of run steps belonging to a run.", "accessibility": "public", "parameters": [ { - "$id": "9291", + "$id": "9313", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -125107,7 +125565,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.listRunSteps.accept" }, { - "$id": "9292", + "$id": "9314", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -125123,13 +125581,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.listRunSteps.openAIBeta" }, { - "$id": "9293", + "$id": "9315", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": 
"The ID of the thread the run and run steps belong to.", "type": { - "$id": "9294", + "$id": "9316", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125147,13 +125605,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.listRunSteps.thread_id" }, { - "$id": "9295", + "$id": "9317", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run the run steps belong to.", "type": { - "$id": "9296", + "$id": "9318", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125171,13 +125629,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.listRunSteps.run_id" }, { - "$id": "9297", + "$id": "9319", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9298", + "$id": "9320", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -125192,7 +125650,7 @@ "readOnly": false }, { - "$id": "9299", + "$id": "9321", "kind": "query", "name": "order", "serializedName": "order", @@ -125209,13 +125667,13 @@ "readOnly": false }, { - "$id": "9300", + "$id": "9322", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9301", + "$id": "9323", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125230,13 +125688,13 @@ "readOnly": false }, { - "$id": "9302", + "$id": "9324", "kind": "query", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9303", + "$id": "9325", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125251,13 +125709,13 @@ "readOnly": false }, { - "$id": "9304", + "$id": "9326", "kind": "query", "name": "include[]", "serializedName": "include[]", "doc": "A list of additional fields to include in the response. Currently the only supported value is\n`step_details.tool_calls[*].file_search.results[*].content` to fetch the file search result\ncontent.\n\nSee the\n[file search tool documentation](/docs/assistants/tools/file-search/customizing-file-search-settings)\nfor more information.", "type": { - "$ref": "9191" + "$ref": "9213" }, "isApiVersion": false, "explode": false, @@ -125295,7 +125753,7 @@ }, "parameters": [ { - "$id": "9305", + "$id": "9327", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -125312,7 +125770,7 @@ "decorators": [] }, { - "$id": "9306", + "$id": "9328", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -125329,13 +125787,13 @@ "decorators": [] }, { - "$id": "9307", + "$id": "9329", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread the run and run steps belong to.", "type": { - "$id": "9308", + "$id": "9330", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125351,13 +125809,13 @@ "decorators": [] }, { - "$id": "9309", + "$id": "9331", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run the run steps belong to.", "type": { - "$id": "9310", + "$id": "9332", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125373,13 +125831,13 @@ "decorators": [] }, { - "$id": 
"9311", + "$id": "9333", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9312", + "$id": "9334", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -125395,7 +125853,7 @@ "decorators": [] }, { - "$id": "9313", + "$id": "9335", "kind": "method", "name": "order", "serializedName": "order", @@ -125413,13 +125871,13 @@ "decorators": [] }, { - "$id": "9314", + "$id": "9336", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9315", + "$id": "9337", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125435,13 +125893,13 @@ "decorators": [] }, { - "$id": "9316", + "$id": "9338", "kind": "method", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9317", + "$id": "9339", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125457,13 +125915,13 @@ "decorators": [] }, { - "$id": "9318", + "$id": "9340", "kind": "method", "name": "include[]", "serializedName": "include[]", "doc": "A list of additional fields to include in the response. 
Currently the only supported value is\n`step_details.tool_calls[*].file_search.results[*].content` to fetch the file search result\ncontent.\n\nSee the\n[file search tool documentation](/docs/assistants/tools/file-search/customizing-file-search-settings)\nfor more information.", "type": { - "$ref": "9191" + "$ref": "9213" }, "location": "Query", "isApiVersion": false, @@ -125493,7 +125951,7 @@ ], "continuationToken": { "parameter": { - "$ref": "9300" + "$ref": "9322" }, "responseSegments": [ "last_id" @@ -125503,21 +125961,21 @@ } }, { - "$id": "9319", + "$id": "9341", "kind": "basic", "name": "getRunStep", "accessibility": "public", "apiVersions": [], "summary": "Retrieves a run step.", "operation": { - "$id": "9320", + "$id": "9342", "name": "getRunStep", "resourceName": "Runs", "summary": "Retrieves a run step.", "accessibility": "public", "parameters": [ { - "$id": "9321", + "$id": "9343", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -125533,7 +125991,7 @@ "crossLanguageDefinitionId": "OpenAI.Runs.getRunStep.accept" }, { - "$id": "9322", + "$id": "9344", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -125549,13 +126007,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.getRunStep.openAIBeta" }, { - "$id": "9323", + "$id": "9345", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to which the run and run step belongs.", "type": { - "$id": "9324", + "$id": "9346", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125573,13 +126031,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.getRunStep.thread_id" }, { - "$id": "9325", + "$id": "9347", "kind": "path", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run to which the run step belongs.", "type": { - "$id": "9326", + "$id": "9348", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125597,13 +126055,13 @@ 
"crossLanguageDefinitionId": "OpenAI.Runs.getRunStep.run_id" }, { - "$id": "9327", + "$id": "9349", "kind": "path", "name": "step_id", "serializedName": "step_id", "doc": "The ID of the run step to retrieve.", "type": { - "$id": "9328", + "$id": "9350", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125621,13 +126079,13 @@ "crossLanguageDefinitionId": "OpenAI.Runs.getRunStep.step_id" }, { - "$id": "9329", + "$id": "9351", "kind": "query", "name": "include[]", "serializedName": "include[]", "doc": "A list of additional fields to include in the response. Currently the only supported value is\n`step_details.tool_calls[*].file_search.results[*].content` to fetch the file search result\ncontent.\n\nSee the\n[file search tool documentation](/docs/assistants/tools/file-search/customizing-file-search-settings)\nfor more information.", "type": { - "$ref": "9191" + "$ref": "9213" }, "isApiVersion": false, "explode": false, @@ -125665,7 +126123,7 @@ }, "parameters": [ { - "$id": "9330", + "$id": "9352", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -125682,7 +126140,7 @@ "decorators": [] }, { - "$id": "9331", + "$id": "9353", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -125699,13 +126157,13 @@ "decorators": [] }, { - "$id": "9332", + "$id": "9354", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to which the run and run step belongs.", "type": { - "$id": "9333", + "$id": "9355", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125721,13 +126179,13 @@ "decorators": [] }, { - "$id": "9334", + "$id": "9356", "kind": "method", "name": "run_id", "serializedName": "run_id", "doc": "The ID of the run to which the run step belongs.", "type": { - "$id": "9335", + "$id": "9357", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125743,13 +126201,13 @@ "decorators": [] 
}, { - "$id": "9336", + "$id": "9358", "kind": "method", "name": "step_id", "serializedName": "step_id", "doc": "The ID of the run step to retrieve.", "type": { - "$id": "9337", + "$id": "9359", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -125765,13 +126223,13 @@ "decorators": [] }, { - "$id": "9338", + "$id": "9360", "kind": "method", "name": "include[]", "serializedName": "include[]", "doc": "A list of additional fields to include in the response. Currently the only supported value is\n`step_details.tool_calls[*].file_search.results[*].content` to fetch the file search result\ncontent.\n\nSee the\n[file search tool documentation](/docs/assistants/tools/file-search/customizing-file-search-settings)\nfor more information.", "type": { - "$ref": "9191" + "$ref": "9213" }, "location": "Query", "isApiVersion": false, @@ -125796,13 +126254,13 @@ ], "parameters": [ { - "$id": "9339", + "$id": "9361", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9340", + "$id": "9362", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -125813,7 +126271,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9341", + "$id": "9363", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -125831,31 +126289,31 @@ "crossLanguageDefinitionId": "OpenAI.Runs", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9342", + "$id": "9364", "kind": "client", "name": "Threads", "namespace": "OpenAI", "methods": [ { - "$id": "9343", + "$id": "9365", "kind": "basic", "name": "createThread", "accessibility": "public", "apiVersions": [], "summary": "Create a thread.", "operation": { - "$id": "9344", + "$id": "9366", "name": "createThread", "resourceName": "Threads", "summary": "Create a thread.", "accessibility": "public", "parameters": [ { - "$id": "9345", + "$id": "9367", "kind": "header", "name": 
"accept", "serializedName": "Accept", @@ -125871,7 +126329,7 @@ "crossLanguageDefinitionId": "OpenAI.Threads.createThread.accept" }, { - "$id": "9346", + "$id": "9368", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -125887,7 +126345,7 @@ "crossLanguageDefinitionId": "OpenAI.Threads.createThread.openAIBeta" }, { - "$id": "9347", + "$id": "9369", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -125904,7 +126362,7 @@ "crossLanguageDefinitionId": "OpenAI.Threads.createThread.contentType" }, { - "$id": "9348", + "$id": "9370", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -125952,7 +126410,7 @@ }, "parameters": [ { - "$id": "9349", + "$id": "9371", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -125969,7 +126427,7 @@ "decorators": [] }, { - "$id": "9350", + "$id": "9372", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -125986,7 +126444,7 @@ "decorators": [] }, { - "$id": "9351", + "$id": "9373", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -126003,7 +126461,7 @@ "decorators": [] }, { - "$id": "9352", + "$id": "9374", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -126032,21 +126490,21 @@ "crossLanguageDefinitionId": "OpenAI.Threads.createThread" }, { - "$id": "9353", + "$id": "9375", "kind": "basic", "name": "getThread", "accessibility": "public", "apiVersions": [], "summary": "Retrieves a thread.", "operation": { - "$id": "9354", + "$id": "9376", "name": "getThread", "resourceName": "Threads", "summary": "Retrieves a thread.", "accessibility": "public", "parameters": [ { - "$id": "9355", + "$id": "9377", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -126062,7 +126520,7 @@ "crossLanguageDefinitionId": "OpenAI.Threads.getThread.accept" }, { - "$id": "9356", + "$id": "9378", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ 
-126078,13 +126536,13 @@ "crossLanguageDefinitionId": "OpenAI.Threads.getThread.openAIBeta" }, { - "$id": "9357", + "$id": "9379", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to retrieve.", "type": { - "$id": "9358", + "$id": "9380", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126128,7 +126586,7 @@ }, "parameters": [ { - "$id": "9359", + "$id": "9381", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -126145,7 +126603,7 @@ "decorators": [] }, { - "$id": "9360", + "$id": "9382", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -126162,13 +126620,13 @@ "decorators": [] }, { - "$id": "9361", + "$id": "9383", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to retrieve.", "type": { - "$id": "9362", + "$id": "9384", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126195,21 +126653,21 @@ "crossLanguageDefinitionId": "OpenAI.Threads.getThread" }, { - "$id": "9363", + "$id": "9385", "kind": "basic", "name": "modifyThread", "accessibility": "public", "apiVersions": [], "summary": "Modifies a thread.", "operation": { - "$id": "9364", + "$id": "9386", "name": "modifyThread", "resourceName": "Threads", "summary": "Modifies a thread.", "accessibility": "public", "parameters": [ { - "$id": "9365", + "$id": "9387", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -126225,7 +126683,7 @@ "crossLanguageDefinitionId": "OpenAI.Threads.modifyThread.accept" }, { - "$id": "9366", + "$id": "9388", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -126241,13 +126699,13 @@ "crossLanguageDefinitionId": "OpenAI.Threads.modifyThread.openAIBeta" }, { - "$id": "9367", + "$id": "9389", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to modify. 
Only the `metadata` can be modified.", "type": { - "$id": "9368", + "$id": "9390", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126265,7 +126723,7 @@ "crossLanguageDefinitionId": "OpenAI.Threads.modifyThread.thread_id" }, { - "$id": "9369", + "$id": "9391", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -126282,7 +126740,7 @@ "crossLanguageDefinitionId": "OpenAI.Threads.modifyThread.contentType" }, { - "$id": "9370", + "$id": "9392", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -126330,7 +126788,7 @@ }, "parameters": [ { - "$id": "9371", + "$id": "9393", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -126347,7 +126805,7 @@ "decorators": [] }, { - "$id": "9372", + "$id": "9394", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -126364,13 +126822,13 @@ "decorators": [] }, { - "$id": "9373", + "$id": "9395", "kind": "method", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to modify. 
Only the `metadata` can be modified.", "type": { - "$id": "9374", + "$id": "9396", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126386,7 +126844,7 @@ "decorators": [] }, { - "$id": "9375", + "$id": "9397", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -126403,7 +126861,7 @@ "decorators": [] }, { - "$id": "9376", + "$id": "9398", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -126432,21 +126890,21 @@ "crossLanguageDefinitionId": "OpenAI.Threads.modifyThread" }, { - "$id": "9377", + "$id": "9399", "kind": "basic", "name": "deleteThread", "accessibility": "public", "apiVersions": [], "summary": "Delete a thread.", "operation": { - "$id": "9378", + "$id": "9400", "name": "deleteThread", "resourceName": "Threads", "summary": "Delete a thread.", "accessibility": "public", "parameters": [ { - "$id": "9379", + "$id": "9401", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -126462,7 +126920,7 @@ "crossLanguageDefinitionId": "OpenAI.Threads.deleteThread.accept" }, { - "$id": "9380", + "$id": "9402", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -126478,13 +126936,13 @@ "crossLanguageDefinitionId": "OpenAI.Threads.deleteThread.openAIBeta" }, { - "$id": "9381", + "$id": "9403", "kind": "path", "name": "thread_id", "serializedName": "thread_id", "doc": "The ID of the thread to delete.", "type": { - "$id": "9382", + "$id": "9404", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126528,7 +126986,7 @@ }, "parameters": [ { - "$id": "9383", + "$id": "9405", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -126545,7 +127003,7 @@ "decorators": [] }, { - "$id": "9384", + "$id": "9406", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -126562,13 +127020,13 @@ "decorators": [] }, { - "$id": "9385", + "$id": "9407", "kind": "method", "name": "thread_id", 
"serializedName": "thread_id", "doc": "The ID of the thread to delete.", "type": { - "$id": "9386", + "$id": "9408", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126597,13 +127055,13 @@ ], "parameters": [ { - "$id": "9387", + "$id": "9409", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9388", + "$id": "9410", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -126614,7 +127072,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9389", + "$id": "9411", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -126632,31 +127090,31 @@ "crossLanguageDefinitionId": "OpenAI.Threads", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9390", + "$id": "9412", "kind": "client", "name": "VectorStores", "namespace": "OpenAI", "methods": [ { - "$id": "9391", + "$id": "9413", "kind": "paging", "name": "GetVectorStores", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of vector stores.", "operation": { - "$id": "9392", + "$id": "9414", "name": "GetVectorStores", "resourceName": "VectorStores", "summary": "Returns a list of vector stores.", "accessibility": "public", "parameters": [ { - "$id": "9393", + "$id": "9415", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -126672,13 +127130,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.listVectorStores.accept" }, { - "$id": "9394", + "$id": "9416", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9395", + "$id": "9417", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -126693,7 +127151,7 @@ "readOnly": false }, { - "$id": "9396", + "$id": "9418", "kind": "query", "name": "order", "serializedName": "order", @@ -126710,13 +127168,13 @@ "readOnly": false }, { - "$id": "9397", + "$id": "9419", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9398", + "$id": "9420", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126731,13 +127189,13 @@ "readOnly": false }, { - "$id": "9399", + "$id": "9421", "kind": "query", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9400", + "$id": "9422", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126778,7 +127236,7 @@ }, "parameters": [ { - "$id": "9401", + "$id": "9423", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -126795,13 +127253,13 @@ "decorators": [] }, { - "$id": "9402", + "$id": "9424", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9403", + "$id": "9425", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -126817,7 +127275,7 @@ "decorators": [] }, { - "$id": "9404", + "$id": "9426", "kind": "method", "name": "order", "serializedName": "order", @@ -126835,13 +127293,13 @@ "decorators": [] }, { - "$id": "9405", + "$id": "9427", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9406", + "$id": "9428", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126857,13 +127315,13 @@ "decorators": [] }, { - "$id": "9407", + "$id": "9429", "kind": "method", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9408", + "$id": "9430", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -126897,7 +127355,7 @@ ], "continuationToken": { "parameter": { - "$ref": "9397" + "$ref": "9419" }, "responseSegments": [ "last_id" @@ -126907,21 +127365,21 @@ } }, { - "$id": "9409", + "$id": "9431", "kind": "basic", "name": "createVectorStore", "accessibility": "public", "apiVersions": [], "summary": "Creates a vector store.", "operation": { - "$id": "9410", + "$id": "9432", "name": "createVectorStore", "resourceName": "VectorStores", "summary": "Creates a vector store.", "accessibility": "public", "parameters": [ { - "$id": "9411", + "$id": "9433", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -126937,7 +127395,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStore.accept" }, { - "$id": "9412", + "$id": "9434", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -126954,7 +127412,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStore.contentType" }, { - "$id": "9413", + "$id": "9435", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -127002,7 +127460,7 @@ }, "parameters": [ { - "$id": "9414", + "$id": "9436", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -127019,7 +127477,7 @@ "decorators": [] }, { - "$id": "9415", + "$id": "9437", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -127036,7 +127494,7 @@ "decorators": [] }, { - "$id": "9416", + "$id": "9438", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -127065,21 +127523,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStore" }, { - 
"$id": "9417", + "$id": "9439", "kind": "basic", "name": "getVectorStore", "accessibility": "public", "apiVersions": [], "summary": "Retrieves a vector store.", "operation": { - "$id": "9418", + "$id": "9440", "name": "getVectorStore", "resourceName": "VectorStores", "summary": "Retrieves a vector store.", "accessibility": "public", "parameters": [ { - "$id": "9419", + "$id": "9441", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -127095,13 +127553,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.getVectorStore.accept" }, { - "$id": "9420", + "$id": "9442", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store to retrieve.", "type": { - "$id": "9421", + "$id": "9443", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127145,7 +127603,7 @@ }, "parameters": [ { - "$id": "9422", + "$id": "9444", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -127162,13 +127620,13 @@ "decorators": [] }, { - "$id": "9423", + "$id": "9445", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store to retrieve.", "type": { - "$id": "9424", + "$id": "9446", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127195,21 +127653,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.getVectorStore" }, { - "$id": "9425", + "$id": "9447", "kind": "basic", "name": "modifyVectorStore", "accessibility": "public", "apiVersions": [], "summary": "Modifies a vector store.", "operation": { - "$id": "9426", + "$id": "9448", "name": "modifyVectorStore", "resourceName": "VectorStores", "summary": "Modifies a vector store.", "accessibility": "public", "parameters": [ { - "$id": "9427", + "$id": "9449", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -127225,13 +127683,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.modifyVectorStore.accept" 
}, { - "$id": "9428", + "$id": "9450", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store to modify.", "type": { - "$id": "9429", + "$id": "9451", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127249,7 +127707,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.modifyVectorStore.vector_store_id" }, { - "$id": "9430", + "$id": "9452", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -127266,7 +127724,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.modifyVectorStore.contentType" }, { - "$id": "9431", + "$id": "9453", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -127314,7 +127772,7 @@ }, "parameters": [ { - "$id": "9432", + "$id": "9454", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -127331,13 +127789,13 @@ "decorators": [] }, { - "$id": "9433", + "$id": "9455", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store to modify.", "type": { - "$id": "9434", + "$id": "9456", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127353,7 +127811,7 @@ "decorators": [] }, { - "$id": "9435", + "$id": "9457", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -127370,7 +127828,7 @@ "decorators": [] }, { - "$id": "9436", + "$id": "9458", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -127399,21 +127857,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.modifyVectorStore" }, { - "$id": "9437", + "$id": "9459", "kind": "basic", "name": "deleteVectorStore", "accessibility": "public", "apiVersions": [], "summary": "Delete a vector store.", "operation": { - "$id": "9438", + "$id": "9460", "name": "deleteVectorStore", "resourceName": "VectorStores", "summary": "Delete a vector store.", "accessibility": "public", "parameters": [ { - 
"$id": "9439", + "$id": "9461", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -127429,13 +127887,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.deleteVectorStore.accept" }, { - "$id": "9440", + "$id": "9462", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store to delete.", "type": { - "$id": "9441", + "$id": "9463", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127479,7 +127937,7 @@ }, "parameters": [ { - "$id": "9442", + "$id": "9464", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -127496,13 +127954,13 @@ "decorators": [] }, { - "$id": "9443", + "$id": "9465", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store to delete.", "type": { - "$id": "9444", + "$id": "9466", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127529,21 +127987,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.deleteVectorStore" }, { - "$id": "9445", + "$id": "9467", "kind": "basic", "name": "AddFileBatchToVectorStore", "accessibility": "public", "apiVersions": [], "summary": "Create a vector store file batch.", "operation": { - "$id": "9446", + "$id": "9468", "name": "AddFileBatchToVectorStore", "resourceName": "VectorStores", "summary": "Create a vector store file batch.", "accessibility": "public", "parameters": [ { - "$id": "9447", + "$id": "9469", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -127559,13 +128017,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStoreFileBatch.accept" }, { - "$id": "9448", + "$id": "9470", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store for which to create a file batch.", "type": { - "$id": "9449", + "$id": "9471", "kind": "string", "name": "string", "crossLanguageDefinitionId": 
"TypeSpec.string", @@ -127583,7 +128041,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStoreFileBatch.vector_store_id" }, { - "$id": "9450", + "$id": "9472", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -127600,7 +128058,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStoreFileBatch.contentType" }, { - "$id": "9451", + "$id": "9473", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -127648,7 +128106,7 @@ }, "parameters": [ { - "$id": "9452", + "$id": "9474", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -127665,13 +128123,13 @@ "decorators": [] }, { - "$id": "9453", + "$id": "9475", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store for which to create a file batch.", "type": { - "$id": "9454", + "$id": "9476", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127687,7 +128145,7 @@ "decorators": [] }, { - "$id": "9455", + "$id": "9477", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -127704,7 +128162,7 @@ "decorators": [] }, { - "$id": "9456", + "$id": "9478", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -127733,21 +128191,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStoreFileBatch" }, { - "$id": "9457", + "$id": "9479", "kind": "basic", "name": "getVectorStoreFileBatch", "accessibility": "public", "apiVersions": [], "summary": "Retrieves a vector store file batch.", "operation": { - "$id": "9458", + "$id": "9480", "name": "getVectorStoreFileBatch", "resourceName": "VectorStores", "summary": "Retrieves a vector store file batch.", "accessibility": "public", "parameters": [ { - "$id": "9459", + "$id": "9481", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -127763,13 +128221,13 @@ "crossLanguageDefinitionId": 
"OpenAI.VectorStores.getVectorStoreFileBatch.accept" }, { - "$id": "9460", + "$id": "9482", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file batch belongs to.", "type": { - "$id": "9461", + "$id": "9483", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127787,13 +128245,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.getVectorStoreFileBatch.vector_store_id" }, { - "$id": "9462", + "$id": "9484", "kind": "path", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the file batch being retrieved.", "type": { - "$id": "9463", + "$id": "9485", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127837,7 +128295,7 @@ }, "parameters": [ { - "$id": "9464", + "$id": "9486", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -127854,13 +128312,13 @@ "decorators": [] }, { - "$id": "9465", + "$id": "9487", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file batch belongs to.", "type": { - "$id": "9466", + "$id": "9488", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127876,13 +128334,13 @@ "decorators": [] }, { - "$id": "9467", + "$id": "9489", "kind": "method", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the file batch being retrieved.", "type": { - "$id": "9468", + "$id": "9490", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127909,21 +128367,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.getVectorStoreFileBatch" }, { - "$id": "9469", + "$id": "9491", "kind": "basic", "name": "cancelVectorStoreFileBatch", "accessibility": "public", "apiVersions": [], "summary": "Cancel a vector store file batch. 
This attempts to cancel the processing of files in this batch as soon as possible.", "operation": { - "$id": "9470", + "$id": "9492", "name": "cancelVectorStoreFileBatch", "resourceName": "VectorStores", "summary": "Cancel a vector store file batch. This attempts to cancel the processing of files in this batch as soon as possible.", "accessibility": "public", "parameters": [ { - "$id": "9471", + "$id": "9493", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -127939,13 +128397,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.cancelVectorStoreFileBatch.accept" }, { - "$id": "9472", + "$id": "9494", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file batch belongs to.", "type": { - "$id": "9473", + "$id": "9495", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -127963,13 +128421,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.cancelVectorStoreFileBatch.vector_store_id" }, { - "$id": "9474", + "$id": "9496", "kind": "path", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the file batch to cancel.", "type": { - "$id": "9475", + "$id": "9497", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128013,7 +128471,7 @@ }, "parameters": [ { - "$id": "9476", + "$id": "9498", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -128030,13 +128488,13 @@ "decorators": [] }, { - "$id": "9477", + "$id": "9499", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file batch belongs to.", "type": { - "$id": "9478", + "$id": "9500", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128052,13 +128510,13 @@ "decorators": [] }, { - "$id": "9479", + "$id": "9501", "kind": "method", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the file 
batch to cancel.", "type": { - "$id": "9480", + "$id": "9502", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128085,21 +128543,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.cancelVectorStoreFileBatch" }, { - "$id": "9481", + "$id": "9503", "kind": "paging", "name": "GetVectorStoreFilesInBatch", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of vector store files in a batch.", "operation": { - "$id": "9482", + "$id": "9504", "name": "GetVectorStoreFilesInBatch", "resourceName": "VectorStores", "summary": "Returns a list of vector store files in a batch.", "accessibility": "public", "parameters": [ { - "$id": "9483", + "$id": "9505", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -128115,13 +128573,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.listFilesInVectorStoreBatch.accept" }, { - "$id": "9484", + "$id": "9506", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file batch belongs to.", "type": { - "$id": "9485", + "$id": "9507", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128139,13 +128597,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.listFilesInVectorStoreBatch.vector_store_id" }, { - "$id": "9486", + "$id": "9508", "kind": "path", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the file batch that the files belong to.", "type": { - "$id": "9487", + "$id": "9509", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128163,13 +128621,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.listFilesInVectorStoreBatch.batch_id" }, { - "$id": "9488", + "$id": "9510", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9489", + "$id": "9511", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -128184,7 +128642,7 @@ "readOnly": false }, { - "$id": "9490", + "$id": "9512", "kind": "query", "name": "order", "serializedName": "order", @@ -128201,13 +128659,13 @@ "readOnly": false }, { - "$id": "9491", + "$id": "9513", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9492", + "$id": "9514", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128222,13 +128680,13 @@ "readOnly": false }, { - "$id": "9493", + "$id": "9515", "kind": "query", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9494", + "$id": "9516", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128243,7 +128701,7 @@ "readOnly": false }, { - "$id": "9495", + "$id": "9517", "kind": "query", "name": "filter", "serializedName": "filter", @@ -128286,7 +128744,7 @@ }, "parameters": [ { - "$id": "9496", + "$id": "9518", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -128303,13 +128761,13 @@ "decorators": [] }, { - "$id": "9497", + "$id": "9519", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file batch belongs to.", "type": { - "$id": "9498", + "$id": "9520", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128325,13 +128783,13 @@ "decorators": [] }, { - "$id": "9499", + "$id": "9521", "kind": "method", "name": "batch_id", "serializedName": "batch_id", "doc": "The ID of the file batch that the files belong to.", "type": { - "$id": "9500", + "$id": "9522", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128347,13 +128805,13 @@ "decorators": [] }, { - "$id": "9501", + "$id": "9523", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9502", + "$id": "9524", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -128369,7 +128827,7 @@ "decorators": [] }, { - "$id": "9503", + "$id": "9525", "kind": "method", "name": "order", "serializedName": "order", @@ -128387,13 +128845,13 @@ "decorators": [] }, { - "$id": "9504", + "$id": "9526", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9505", + "$id": "9527", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128409,13 +128867,13 @@ "decorators": [] }, { - "$id": "9506", + "$id": "9528", "kind": "method", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9507", + "$id": "9529", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128431,7 +128889,7 @@ "decorators": [] }, { - "$id": "9508", + "$id": "9530", "kind": "method", "name": "filter", "serializedName": "filter", @@ -128467,7 +128925,7 @@ ], "continuationToken": { "parameter": { - "$ref": "9491" + "$ref": "9513" }, "responseSegments": [ "last_id" @@ -128477,21 +128935,21 @@ } }, { - "$id": "9509", + "$id": "9531", "kind": "paging", "name": "listVectorStoreFiles", "accessibility": "public", "apiVersions": [], "summary": "Returns a list of vector store files.", "operation": { - "$id": "9510", + "$id": "9532", "name": "listVectorStoreFiles", "resourceName": "VectorStores", "summary": "Returns a list of vector store files.", "accessibility": "public", "parameters": [ { - "$id": "9511", + "$id": "9533", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -128507,13 +128965,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.listVectorStoreFiles.accept" }, { - "$id": "9512", + "$id": "9534", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the files belong to.", "type": { - "$id": "9513", + "$id": "9535", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128531,13 +128989,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.listVectorStoreFiles.vector_store_id" }, { - "$id": "9514", + "$id": "9536", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9515", + "$id": "9537", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -128552,7 +129010,7 @@ "readOnly": false }, { - "$id": "9516", + "$id": "9538", "kind": "query", "name": "order", "serializedName": "order", @@ -128569,13 +129027,13 @@ "readOnly": false }, { - "$id": "9517", + "$id": "9539", "kind": "query", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9518", + "$id": "9540", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128590,13 +129048,13 @@ "readOnly": false }, { - "$id": "9519", + "$id": "9541", "kind": "query", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9520", + "$id": "9542", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128611,7 +129069,7 @@ "readOnly": false }, { - "$id": "9521", + "$id": "9543", "kind": "query", "name": "filter", "serializedName": "filter", @@ -128654,7 +129112,7 @@ }, "parameters": [ { - "$id": "9522", + "$id": "9544", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -128671,13 +129129,13 @@ "decorators": [] }, { - "$id": "9523", + "$id": "9545", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the files belong to.", "type": { - "$id": "9524", + "$id": "9546", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128693,13 +129151,13 @@ "decorators": [] }, { - "$id": "9525", + "$id": "9547", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the\ndefault is 20.", "type": { - "$id": "9526", + "$id": "9548", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -128715,7 +129173,7 @@ "decorators": [] }, { - "$id": "9527", + "$id": "9549", "kind": "method", "name": "order", "serializedName": "order", @@ -128733,13 +129191,13 @@ "decorators": [] }, { - "$id": "9528", + "$id": "9550", "kind": "method", "name": "after", "serializedName": "after", "doc": "A cursor for use in pagination. 
`after` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include after=obj_foo in order to fetch the next page of the list.", "type": { - "$id": "9529", + "$id": "9551", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128755,13 +129213,13 @@ "decorators": [] }, { - "$id": "9530", + "$id": "9552", "kind": "method", "name": "before", "serializedName": "before", "doc": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.\nFor instance, if you make a list request and receive 100 objects, ending with obj_foo, your\nsubsequent call can include before=obj_foo in order to fetch the previous page of the list.", "type": { - "$id": "9531", + "$id": "9553", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128777,7 +129235,7 @@ "decorators": [] }, { - "$id": "9532", + "$id": "9554", "kind": "method", "name": "filter", "serializedName": "filter", @@ -128813,7 +129271,7 @@ ], "continuationToken": { "parameter": { - "$ref": "9517" + "$ref": "9539" }, "responseSegments": [ "last_id" @@ -128823,21 +129281,21 @@ } }, { - "$id": "9533", + "$id": "9555", "kind": "basic", "name": "AddFileToVectorStore", "accessibility": "public", "apiVersions": [], "summary": "Create a vector store file by attaching a [File](/docs/api-reference/files) to a [vector store](/docs/api-reference/vector-stores/object).", "operation": { - "$id": "9534", + "$id": "9556", "name": "AddFileToVectorStore", "resourceName": "VectorStores", "summary": "Create a vector store file by attaching a [File](/docs/api-reference/files) to a [vector store](/docs/api-reference/vector-stores/object).", "accessibility": "public", "parameters": [ { - "$id": "9535", + "$id": "9557", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -128853,13 +129311,13 @@ 
"crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStoreFile.accept" }, { - "$id": "9536", + "$id": "9558", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store for which to create a File.", "type": { - "$id": "9537", + "$id": "9559", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128877,7 +129335,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStoreFile.vector_store_id" }, { - "$id": "9538", + "$id": "9560", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -128894,7 +129352,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStoreFile.contentType" }, { - "$id": "9539", + "$id": "9561", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -128942,7 +129400,7 @@ }, "parameters": [ { - "$id": "9540", + "$id": "9562", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -128959,13 +129417,13 @@ "decorators": [] }, { - "$id": "9541", + "$id": "9563", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store for which to create a File.", "type": { - "$id": "9542", + "$id": "9564", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -128981,7 +129439,7 @@ "decorators": [] }, { - "$id": "9543", + "$id": "9565", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -128998,7 +129456,7 @@ "decorators": [] }, { - "$id": "9544", + "$id": "9566", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -129027,21 +129485,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.createVectorStoreFile" }, { - "$id": "9545", + "$id": "9567", "kind": "basic", "name": "getVectorStoreFile", "accessibility": "public", "apiVersions": [], "summary": "Retrieves a vector store file.", "operation": { - "$id": "9546", + "$id": "9568", "name": 
"getVectorStoreFile", "resourceName": "VectorStores", "summary": "Retrieves a vector store file.", "accessibility": "public", "parameters": [ { - "$id": "9547", + "$id": "9569", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -129057,13 +129515,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.getVectorStoreFile.accept" }, { - "$id": "9548", + "$id": "9570", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file belongs to.", "type": { - "$id": "9549", + "$id": "9571", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129081,13 +129539,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.getVectorStoreFile.vector_store_id" }, { - "$id": "9550", + "$id": "9572", "kind": "path", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file being retrieved.", "type": { - "$id": "9551", + "$id": "9573", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129131,7 +129589,7 @@ }, "parameters": [ { - "$id": "9552", + "$id": "9574", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -129148,13 +129606,13 @@ "decorators": [] }, { - "$id": "9553", + "$id": "9575", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file belongs to.", "type": { - "$id": "9554", + "$id": "9576", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129170,13 +129628,13 @@ "decorators": [] }, { - "$id": "9555", + "$id": "9577", "kind": "method", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file being retrieved.", "type": { - "$id": "9556", + "$id": "9578", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129203,21 +129661,21 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.getVectorStoreFile" }, { - "$id": "9557", + 
"$id": "9579", "kind": "basic", "name": "RemoveFileFromVectorStore", "accessibility": "public", "apiVersions": [], "summary": "Delete a vector store file. This will remove the file from the vector store but the file itself will not be deleted. To delete the file, use the [delete file](/docs/api-reference/files/delete) endpoint.", "operation": { - "$id": "9558", + "$id": "9580", "name": "RemoveFileFromVectorStore", "resourceName": "VectorStores", "summary": "Delete a vector store file. This will remove the file from the vector store but the file itself will not be deleted. To delete the file, use the [delete file](/docs/api-reference/files/delete) endpoint.", "accessibility": "public", "parameters": [ { - "$id": "9559", + "$id": "9581", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -129233,13 +129691,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.deleteVectorStoreFile.accept" }, { - "$id": "9560", + "$id": "9582", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file belongs to.", "type": { - "$id": "9561", + "$id": "9583", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129257,13 +129715,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.deleteVectorStoreFile.vector_store_id" }, { - "$id": "9562", + "$id": "9584", "kind": "path", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to delete.", "type": { - "$id": "9563", + "$id": "9585", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129307,7 +129765,7 @@ }, "parameters": [ { - "$id": "9564", + "$id": "9586", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -129324,13 +129782,13 @@ "decorators": [] }, { - "$id": "9565", + "$id": "9587", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file belongs to.", "type": { 
- "$id": "9566", + "$id": "9588", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129346,13 +129804,13 @@ "decorators": [] }, { - "$id": "9567", + "$id": "9589", "kind": "method", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to delete.", "type": { - "$id": "9568", + "$id": "9590", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129379,27 +129837,27 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.deleteVectorStoreFile" }, { - "$id": "9569", + "$id": "9591", "kind": "basic", "name": "updateVectorStoreFileAttributes", "accessibility": "public", "apiVersions": [], "summary": "Update the attributes of a vector store file.", "operation": { - "$id": "9570", + "$id": "9592", "name": "updateVectorStoreFileAttributes", "resourceName": "VectorStores", "summary": "Update the attributes of a vector store file.", "accessibility": "public", "parameters": [ { - "$id": "9571", + "$id": "9593", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store the file being updated belongs to.", "type": { - "$id": "9572", + "$id": "9594", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129417,13 +129875,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.updateVectorStoreFileAttributes.vector_store_id" }, { - "$id": "9573", + "$id": "9595", "kind": "path", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to update attributes for.", "type": { - "$id": "9574", + "$id": "9596", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129441,7 +129899,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.updateVectorStoreFileAttributes.file_id" }, { - "$id": "9575", + "$id": "9597", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -129458,7 +129916,7 @@ "crossLanguageDefinitionId": 
"OpenAI.VectorStores.updateVectorStoreFileAttributes.contentType" }, { - "$id": "9576", + "$id": "9598", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -129474,7 +129932,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.updateVectorStoreFileAttributes.accept" }, { - "$id": "9577", + "$id": "9599", "kind": "body", "name": "updateVectorStoreFileAttributesRequest", "serializedName": "updateVectorStoreFileAttributesRequest", @@ -129522,13 +129980,13 @@ }, "parameters": [ { - "$id": "9578", + "$id": "9600", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store the file being updated belongs to.", "type": { - "$id": "9579", + "$id": "9601", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129544,13 +130002,13 @@ "decorators": [] }, { - "$id": "9580", + "$id": "9602", "kind": "method", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file to update attributes for.", "type": { - "$id": "9581", + "$id": "9603", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129566,7 +130024,7 @@ "decorators": [] }, { - "$id": "9582", + "$id": "9604", "kind": "method", "name": "attributes", "serializedName": "attributes", @@ -129583,7 +130041,7 @@ "decorators": [] }, { - "$id": "9583", + "$id": "9605", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -129601,7 +130059,7 @@ "decorators": [] }, { - "$id": "9584", + "$id": "9606", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -129629,27 +130087,27 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.updateVectorStoreFileAttributes" }, { - "$id": "9585", + "$id": "9607", "kind": "basic", "name": "retrieveVectorStoreFileContent", "accessibility": "public", "apiVersions": [], "summary": "Retrieves the content of a vector store file.", "operation": { - "$id": "9586", + "$id": "9608", "name": 
"retrieveVectorStoreFileContent", "resourceName": "VectorStores", "summary": "Retrieves the content of a vector store file.", "accessibility": "public", "parameters": [ { - "$id": "9587", + "$id": "9609", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file belongs to.", "type": { - "$id": "9588", + "$id": "9610", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129667,13 +130125,13 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.retrieveVectorStoreFileContent.vector_store_id" }, { - "$id": "9589", + "$id": "9611", "kind": "path", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file with content being retrieved.", "type": { - "$id": "9590", + "$id": "9612", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129691,7 +130149,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.retrieveVectorStoreFileContent.file_id" }, { - "$id": "9591", + "$id": "9613", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -129733,13 +130191,13 @@ }, "parameters": [ { - "$id": "9592", + "$id": "9614", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store that the file belongs to.", "type": { - "$id": "9593", + "$id": "9615", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129755,13 +130213,13 @@ "decorators": [] }, { - "$id": "9594", + "$id": "9616", "kind": "method", "name": "file_id", "serializedName": "file_id", "doc": "The ID of the file with content being retrieved.", "type": { - "$id": "9595", + "$id": "9617", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129777,7 +130235,7 @@ "decorators": [] }, { - "$id": "9596", + "$id": "9618", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -129805,27 +130263,27 @@ 
"crossLanguageDefinitionId": "OpenAI.VectorStores.retrieveVectorStoreFileContent" }, { - "$id": "9597", + "$id": "9619", "kind": "basic", "name": "searchVectorStore", "accessibility": "public", "apiVersions": [], "summary": "Searches a vector store for relevant chunks based on a query and file attributes filter.", "operation": { - "$id": "9598", + "$id": "9620", "name": "searchVectorStore", "resourceName": "VectorStores", "summary": "Searches a vector store for relevant chunks based on a query and file attributes filter.", "accessibility": "public", "parameters": [ { - "$id": "9599", + "$id": "9621", "kind": "path", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store to search.", "type": { - "$id": "9600", + "$id": "9622", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129843,7 +130301,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.searchVectorStore.vector_store_id" }, { - "$id": "9601", + "$id": "9623", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -129860,7 +130318,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.searchVectorStore.contentType" }, { - "$id": "9602", + "$id": "9624", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -129876,7 +130334,7 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores.searchVectorStore.accept" }, { - "$id": "9603", + "$id": "9625", "kind": "body", "name": "vectorStoreSearchRequest", "serializedName": "vectorStoreSearchRequest", @@ -129924,13 +130382,13 @@ }, "parameters": [ { - "$id": "9604", + "$id": "9626", "kind": "method", "name": "vector_store_id", "serializedName": "vector_store_id", "doc": "The ID of the vector store to search.", "type": { - "$id": "9605", + "$id": "9627", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -129946,7 +130404,7 @@ "decorators": [] }, { - "$id": "9606", + "$id": "9628", "kind": "method", "name": "query", 
"serializedName": "query", @@ -129964,13 +130422,13 @@ "decorators": [] }, { - "$id": "9607", + "$id": "9629", "kind": "method", "name": "rewrite_query", "serializedName": "rewrite_query", "doc": "Whether to rewrite the natural language query for vector search.", "type": { - "$id": "9608", + "$id": "9630", "kind": "boolean", "name": "boolean", "crossLanguageDefinitionId": "TypeSpec.boolean", @@ -129986,13 +130444,13 @@ "decorators": [] }, { - "$id": "9609", + "$id": "9631", "kind": "method", "name": "max_num_results", "serializedName": "max_num_results", "doc": "The maximum number of results to return. This number should be between 1 and 50 inclusive.", "type": { - "$id": "9610", + "$id": "9632", "kind": "int32", "name": "int32", "crossLanguageDefinitionId": "TypeSpec.int32", @@ -130008,7 +130466,7 @@ "decorators": [] }, { - "$id": "9611", + "$id": "9633", "kind": "method", "name": "filters", "serializedName": "filters", @@ -130026,7 +130484,7 @@ "decorators": [] }, { - "$id": "9612", + "$id": "9634", "kind": "method", "name": "ranking_options", "serializedName": "ranking_options", @@ -130044,7 +130502,7 @@ "decorators": [] }, { - "$id": "9613", + "$id": "9635", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -130062,7 +130520,7 @@ "decorators": [] }, { - "$id": "9614", + "$id": "9636", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -130092,13 +130550,13 @@ ], "parameters": [ { - "$id": "9615", + "$id": "9637", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9616", + "$id": "9638", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -130109,7 +130567,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9617", + "$id": "9639", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -130127,31 +130585,31 @@ "crossLanguageDefinitionId": "OpenAI.VectorStores", "apiVersions": [], 
"parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9618", + "$id": "9640", "kind": "client", "name": "Completions", "namespace": "OpenAI", "methods": [ { - "$id": "9619", + "$id": "9641", "kind": "basic", "name": "createCompletion", "accessibility": "public", "apiVersions": [], "summary": "Creates a completion for the provided prompt and parameters.", "operation": { - "$id": "9620", + "$id": "9642", "name": "createCompletion", "resourceName": "Completions", "summary": "Creates a completion for the provided prompt and parameters.", "accessibility": "public", "parameters": [ { - "$id": "9621", + "$id": "9643", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -130167,7 +130625,7 @@ "crossLanguageDefinitionId": "OpenAI.Completions.createCompletion.accept" }, { - "$id": "9622", + "$id": "9644", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -130184,7 +130642,7 @@ "crossLanguageDefinitionId": "OpenAI.Completions.createCompletion.contentType" }, { - "$id": "9623", + "$id": "9645", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -130232,7 +130690,7 @@ }, "parameters": [ { - "$id": "9624", + "$id": "9646", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -130249,7 +130707,7 @@ "decorators": [] }, { - "$id": "9625", + "$id": "9647", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -130266,7 +130724,7 @@ "decorators": [] }, { - "$id": "9626", + "$id": "9648", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -130297,13 +130755,13 @@ ], "parameters": [ { - "$id": "9627", + "$id": "9649", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9628", + "$id": "9650", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -130314,7 +130772,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9629", + "$id": "9651", "kind": 
"string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -130332,31 +130790,31 @@ "crossLanguageDefinitionId": "OpenAI.Completions", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9630", + "$id": "9652", "kind": "client", "name": "Models", "namespace": "OpenAI", "methods": [ { - "$id": "9631", + "$id": "9653", "kind": "basic", "name": "listModels", "accessibility": "public", "apiVersions": [], "summary": "Lists the currently available models, and provides basic information about each one such as the\nowner and availability.", "operation": { - "$id": "9632", + "$id": "9654", "name": "listModels", "resourceName": "Models", "summary": "Lists the currently available models, and provides basic information about each one such as the\nowner and availability.", "accessibility": "public", "parameters": [ { - "$id": "9633", + "$id": "9655", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -130398,7 +130856,7 @@ }, "parameters": [ { - "$id": "9634", + "$id": "9656", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -130426,21 +130884,21 @@ "crossLanguageDefinitionId": "OpenAI.Models.listModels" }, { - "$id": "9635", + "$id": "9657", "kind": "basic", "name": "retrieveModel", "accessibility": "public", "apiVersions": [], "summary": "Retrieves a model instance, providing basic information about the model such as the owner and\npermissioning.", "operation": { - "$id": "9636", + "$id": "9658", "name": "retrieveModel", "resourceName": "Models", "summary": "Retrieves a model instance, providing basic information about the model such as the owner and\npermissioning.", "accessibility": "public", "parameters": [ { - "$id": "9637", + "$id": "9659", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -130456,13 +130914,13 @@ "crossLanguageDefinitionId": "OpenAI.Models.retrieveModel.accept" }, { - "$id": "9638", + "$id": "9660", "kind": "path", "name": "model", "serializedName": 
"model", "doc": "The ID of the model to use for this request.", "type": { - "$id": "9639", + "$id": "9661", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -130506,7 +130964,7 @@ }, "parameters": [ { - "$id": "9640", + "$id": "9662", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -130523,13 +130981,13 @@ "decorators": [] }, { - "$id": "9641", + "$id": "9663", "kind": "method", "name": "model", "serializedName": "model", "doc": "The ID of the model to use for this request.", "type": { - "$id": "9642", + "$id": "9664", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -130556,21 +131014,21 @@ "crossLanguageDefinitionId": "OpenAI.Models.retrieveModel" }, { - "$id": "9643", + "$id": "9665", "kind": "basic", "name": "deleteModel", "accessibility": "public", "apiVersions": [], "summary": "Delete a fine-tuned model. You must have the Owner role in your organization to delete a model.", "operation": { - "$id": "9644", + "$id": "9666", "name": "deleteModel", "resourceName": "Models", "summary": "Delete a fine-tuned model. 
You must have the Owner role in your organization to delete a model.", "accessibility": "public", "parameters": [ { - "$id": "9645", + "$id": "9667", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -130586,13 +131044,13 @@ "crossLanguageDefinitionId": "OpenAI.Models.deleteModel.accept" }, { - "$id": "9646", + "$id": "9668", "kind": "path", "name": "model", "serializedName": "model", "doc": "The model to delete", "type": { - "$id": "9647", + "$id": "9669", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -130636,7 +131094,7 @@ }, "parameters": [ { - "$id": "9648", + "$id": "9670", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -130653,13 +131111,13 @@ "decorators": [] }, { - "$id": "9649", + "$id": "9671", "kind": "method", "name": "model", "serializedName": "model", "doc": "The model to delete", "type": { - "$id": "9650", + "$id": "9672", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -130688,13 +131146,13 @@ ], "parameters": [ { - "$id": "9651", + "$id": "9673", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9652", + "$id": "9674", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -130705,7 +131163,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9653", + "$id": "9675", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -130723,31 +131181,31 @@ "crossLanguageDefinitionId": "OpenAI.Models", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9654", + "$id": "9676", "kind": "client", "name": "Realtime", "namespace": "OpenAI", "methods": [ { - "$id": "9655", + "$id": "9677", "kind": "basic", "name": "startRealtimeSession", "accessibility": "public", "apiVersions": [], "summary": "Starts a real-time session for conversation or transcription.", "operation": { - "$id": "9656", + 
"$id": "9678", "name": "startRealtimeSession", "resourceName": "Realtime", "summary": "Starts a real-time session for conversation or transcription.", "accessibility": "public", "parameters": [ { - "$id": "9657", + "$id": "9679", "kind": "header", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -130763,7 +131221,7 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.startRealtimeSession.openAIBeta" }, { - "$id": "9658", + "$id": "9680", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -130780,7 +131238,7 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.startRealtimeSession.contentType" }, { - "$id": "9659", + "$id": "9681", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -130796,12 +131254,12 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.startRealtimeSession.accept" }, { - "$id": "9660", + "$id": "9682", "kind": "body", "name": "requestMessages", "serializedName": "requestMessages", "type": { - "$id": "9661", + "$id": "9683", "kind": "array", "name": "ArrayRealtimeClientEvent", "valueType": { @@ -130828,7 +131286,7 @@ 200 ], "bodyType": { - "$id": "9662", + "$id": "9684", "kind": "array", "name": "ArrayRealtimeServerEvent", "valueType": { @@ -130858,7 +131316,7 @@ }, "parameters": [ { - "$id": "9663", + "$id": "9685", "kind": "method", "name": "openAIBeta", "serializedName": "OpenAI-Beta", @@ -130875,12 +131333,12 @@ "decorators": [] }, { - "$id": "9664", + "$id": "9686", "kind": "method", "name": "requestMessages", "serializedName": "requestMessages", "type": { - "$ref": "9661" + "$ref": "9683" }, "location": "Body", "isApiVersion": false, @@ -130892,7 +131350,7 @@ "decorators": [] }, { - "$id": "9665", + "$id": "9687", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -130910,7 +131368,7 @@ "decorators": [] }, { - "$id": "9666", + "$id": "9688", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -130929,7 +131387,7 @@ ], "response": { "type": { - "$ref": "9662" + 
"$ref": "9684" } }, "isOverride": false, @@ -130938,21 +131396,21 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.startRealtimeSession" }, { - "$id": "9667", + "$id": "9689", "kind": "basic", "name": "createEphemeralToken", "accessibility": "public", "apiVersions": [], "summary": "Create an ephemeral API token for use in client-side applications with the Realtime API. Can be configured with the same session parameters as the session.update client event.\n\nIt responds with a session object, plus a client_secret key which contains a usable ephemeral API token that can be used to authenticate browser clients for the Realtime API.", "operation": { - "$id": "9668", + "$id": "9690", "name": "createEphemeralToken", "resourceName": "Realtime", "summary": "Create an ephemeral API token for use in client-side applications with the Realtime API. Can be configured with the same session parameters as the session.update client event.\n\nIt responds with a session object, plus a client_secret key which contains a usable ephemeral API token that can be used to authenticate browser clients for the Realtime API.", "accessibility": "public", "parameters": [ { - "$id": "9669", + "$id": "9691", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -130969,7 +131427,7 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.createEphemeralToken.contentType" }, { - "$id": "9670", + "$id": "9692", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -130985,7 +131443,7 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.createEphemeralToken.accept" }, { - "$id": "9671", + "$id": "9693", "kind": "body", "name": "request", "serializedName": "request", @@ -131033,7 +131491,7 @@ }, "parameters": [ { - "$id": "9672", + "$id": "9694", "kind": "method", "name": "request", "serializedName": "request", @@ -131050,7 +131508,7 @@ "decorators": [] }, { - "$id": "9673", + "$id": "9695", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -131068,7 
+131526,7 @@ "decorators": [] }, { - "$id": "9674", + "$id": "9696", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -131096,21 +131554,21 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.createEphemeralToken" }, { - "$id": "9675", + "$id": "9697", "kind": "basic", "name": "createEphemeralTranscriptionToken", "accessibility": "public", "apiVersions": [], "summary": "Create an ephemeral API token for use in client-side applications with the Realtime API specifically for realtime transcriptions. Can be configured with the same session parameters as the transcription_session.update client event.\n\nIt responds with a session object, plus a client_secret key which contains a usable ephemeral API token that can be used to authenticate browser clients for the Realtime API.", "operation": { - "$id": "9676", + "$id": "9698", "name": "createEphemeralTranscriptionToken", "resourceName": "Realtime", "summary": "Create an ephemeral API token for use in client-side applications with the Realtime API specifically for realtime transcriptions. 
Can be configured with the same session parameters as the transcription_session.update client event.\n\nIt responds with a session object, plus a client_secret key which contains a usable ephemeral API token that can be used to authenticate browser clients for the Realtime API.", "accessibility": "public", "parameters": [ { - "$id": "9677", + "$id": "9699", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -131127,7 +131585,7 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.createEphemeralTranscriptionToken.contentType" }, { - "$id": "9678", + "$id": "9700", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -131143,7 +131601,7 @@ "crossLanguageDefinitionId": "OpenAI.Realtime.createEphemeralTranscriptionToken.accept" }, { - "$id": "9679", + "$id": "9701", "kind": "body", "name": "request", "serializedName": "request", @@ -131191,7 +131649,7 @@ }, "parameters": [ { - "$id": "9680", + "$id": "9702", "kind": "method", "name": "request", "serializedName": "request", @@ -131208,7 +131666,7 @@ "decorators": [] }, { - "$id": "9681", + "$id": "9703", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -131226,7 +131684,7 @@ "decorators": [] }, { - "$id": "9682", + "$id": "9704", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -131256,13 +131714,13 @@ ], "parameters": [ { - "$id": "9683", + "$id": "9705", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9684", + "$id": "9706", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -131273,7 +131731,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9685", + "$id": "9707", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -131291,31 +131749,31 @@ "crossLanguageDefinitionId": "OpenAI.Realtime", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9686", + "$id": "9708", 
"kind": "client", "name": "Uploads", "namespace": "OpenAI", "methods": [ { - "$id": "9687", + "$id": "9709", "kind": "basic", "name": "createUpload", "accessibility": "public", "apiVersions": [], "summary": "Creates an intermediate [Upload](/docs/api-reference/uploads/object) object that you can add [Parts](/docs/api-reference/uploads/part-object) to. Currently, an Upload can accept at most 8 GB in total and expires after an hour after you create it.\n\nOnce you complete the Upload, we will create a [File](/docs/api-reference/files/object) object that contains all the parts you uploaded. This File is usable in the rest of our platform as a regular File object.\n\nFor certain `purpose`s, the correct `mime_type` must be specified. Please refer to documentation for the supported MIME types for your use case:\n- [Assistants](/docs/assistants/tools/file-search/supported-files)\n\nFor guidance on the proper filename extensions for each purpose, please follow the documentation on [creating a File](/docs/api-reference/files/create).", "operation": { - "$id": "9688", + "$id": "9710", "name": "createUpload", "resourceName": "Uploads", "summary": "Creates an intermediate [Upload](/docs/api-reference/uploads/object) object that you can add [Parts](/docs/api-reference/uploads/part-object) to. Currently, an Upload can accept at most 8 GB in total and expires after an hour after you create it.\n\nOnce you complete the Upload, we will create a [File](/docs/api-reference/files/object) object that contains all the parts you uploaded. This File is usable in the rest of our platform as a regular File object.\n\nFor certain `purpose`s, the correct `mime_type` must be specified. 
Please refer to documentation for the supported MIME types for your use case:\n- [Assistants](/docs/assistants/tools/file-search/supported-files)\n\nFor guidance on the proper filename extensions for each purpose, please follow the documentation on [creating a File](/docs/api-reference/files/create).", "accessibility": "public", "parameters": [ { - "$id": "9689", + "$id": "9711", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -131331,7 +131789,7 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.createUpload.accept" }, { - "$id": "9690", + "$id": "9712", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -131348,7 +131806,7 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.createUpload.contentType" }, { - "$id": "9691", + "$id": "9713", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -131396,7 +131854,7 @@ }, "parameters": [ { - "$id": "9692", + "$id": "9714", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -131413,7 +131871,7 @@ "decorators": [] }, { - "$id": "9693", + "$id": "9715", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -131430,7 +131888,7 @@ "decorators": [] }, { - "$id": "9694", + "$id": "9716", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -131459,21 +131917,21 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.createUpload" }, { - "$id": "9695", + "$id": "9717", "kind": "basic", "name": "addUploadPart", "accessibility": "public", "apiVersions": [], "summary": "Adds a [Part](/docs/api-reference/uploads/part-object) to an [Upload](/docs/api-reference/uploads/object) object. A Part represents a chunk of bytes from the file you are trying to upload. \n\nEach Part can be at most 64 MB, and you can add Parts until you hit the Upload maximum of 8 GB.\n\nIt is possible to add multiple Parts in parallel. 
You can decide the intended order of the Parts when you [complete the Upload](/docs/api-reference/uploads/complete).", "operation": { - "$id": "9696", + "$id": "9718", "name": "addUploadPart", "resourceName": "Uploads", "summary": "Adds a [Part](/docs/api-reference/uploads/part-object) to an [Upload](/docs/api-reference/uploads/object) object. A Part represents a chunk of bytes from the file you are trying to upload. \n\nEach Part can be at most 64 MB, and you can add Parts until you hit the Upload maximum of 8 GB.\n\nIt is possible to add multiple Parts in parallel. You can decide the intended order of the Parts when you [complete the Upload](/docs/api-reference/uploads/complete).", "accessibility": "public", "parameters": [ { - "$id": "9697", + "$id": "9719", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -131489,7 +131947,7 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.addUploadPart.accept" }, { - "$id": "9698", + "$id": "9720", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -131505,12 +131963,12 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.addUploadPart.contentType" }, { - "$id": "9699", + "$id": "9721", "kind": "path", "name": "upload_id", "serializedName": "upload_id", "type": { - "$id": "9700", + "$id": "9722", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -131528,7 +131986,7 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.addUploadPart.upload_id" }, { - "$id": "9701", + "$id": "9723", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -131576,7 +132034,7 @@ }, "parameters": [ { - "$id": "9702", + "$id": "9724", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -131593,7 +132051,7 @@ "decorators": [] }, { - "$id": "9703", + "$id": "9725", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -131610,12 +132068,12 @@ "decorators": [] }, { - "$id": "9704", + "$id": "9726", "kind": "method", "name": 
"upload_id", "serializedName": "upload_id", "type": { - "$id": "9705", + "$id": "9727", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -131631,7 +132089,7 @@ "decorators": [] }, { - "$id": "9706", + "$id": "9728", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -131659,21 +132117,21 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.addUploadPart" }, { - "$id": "9707", + "$id": "9729", "kind": "basic", "name": "completeUpload", "accessibility": "public", "apiVersions": [], "summary": "Completes the [Upload](/docs/api-reference/uploads/object). \n\nWithin the returned Upload object, there is a nested [File](/docs/api-reference/files/object) object that is ready to use in the rest of the platform.\n\nYou can specify the order of the Parts by passing in an ordered list of the Part IDs.\n\nThe number of bytes uploaded upon completion must match the number of bytes initially specified when creating the Upload object. No Parts may be added after an Upload is completed.", "operation": { - "$id": "9708", + "$id": "9730", "name": "completeUpload", "resourceName": "Uploads", "summary": "Completes the [Upload](/docs/api-reference/uploads/object). \n\nWithin the returned Upload object, there is a nested [File](/docs/api-reference/files/object) object that is ready to use in the rest of the platform.\n\nYou can specify the order of the Parts by passing in an ordered list of the Part IDs.\n\nThe number of bytes uploaded upon completion must match the number of bytes initially specified when creating the Upload object. 
No Parts may be added after an Upload is completed.", "accessibility": "public", "parameters": [ { - "$id": "9709", + "$id": "9731", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -131689,12 +132147,12 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.completeUpload.accept" }, { - "$id": "9710", + "$id": "9732", "kind": "path", "name": "upload_id", "serializedName": "upload_id", "type": { - "$id": "9711", + "$id": "9733", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -131712,7 +132170,7 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.completeUpload.upload_id" }, { - "$id": "9712", + "$id": "9734", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -131729,7 +132187,7 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.completeUpload.contentType" }, { - "$id": "9713", + "$id": "9735", "kind": "body", "name": "requestBody", "serializedName": "requestBody", @@ -131777,7 +132235,7 @@ }, "parameters": [ { - "$id": "9714", + "$id": "9736", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -131794,12 +132252,12 @@ "decorators": [] }, { - "$id": "9715", + "$id": "9737", "kind": "method", "name": "upload_id", "serializedName": "upload_id", "type": { - "$id": "9716", + "$id": "9738", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -131815,7 +132273,7 @@ "decorators": [] }, { - "$id": "9717", + "$id": "9739", "kind": "method", "name": "requestBody", "serializedName": "requestBody", @@ -131832,7 +132290,7 @@ "decorators": [] }, { - "$id": "9718", + "$id": "9740", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -131861,21 +132319,21 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.completeUpload" }, { - "$id": "9719", + "$id": "9741", "kind": "basic", "name": "cancelUpload", "accessibility": "public", "apiVersions": [], "summary": "Cancels the Upload. 
No Parts may be added after an Upload is cancelled.", "operation": { - "$id": "9720", + "$id": "9742", "name": "cancelUpload", "resourceName": "Uploads", "summary": "Cancels the Upload. No Parts may be added after an Upload is cancelled.", "accessibility": "public", "parameters": [ { - "$id": "9721", + "$id": "9743", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -131891,12 +132349,12 @@ "crossLanguageDefinitionId": "OpenAI.Uploads.cancelUpload.accept" }, { - "$id": "9722", + "$id": "9744", "kind": "path", "name": "upload_id", "serializedName": "upload_id", "type": { - "$id": "9723", + "$id": "9745", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -131940,7 +132398,7 @@ }, "parameters": [ { - "$id": "9724", + "$id": "9746", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -131957,12 +132415,12 @@ "decorators": [] }, { - "$id": "9725", + "$id": "9747", "kind": "method", "name": "upload_id", "serializedName": "upload_id", "type": { - "$id": "9726", + "$id": "9748", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -131991,13 +132449,13 @@ ], "parameters": [ { - "$id": "9727", + "$id": "9749", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9728", + "$id": "9750", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -132008,7 +132466,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9729", + "$id": "9751", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -132026,17 +132484,17 @@ "crossLanguageDefinitionId": "OpenAI.Uploads", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9730", + "$id": "9752", "kind": "client", "name": "Conversations", "namespace": "OpenAI", "methods": [ { - "$id": "9731", + "$id": "9753", "kind": "paging", "name": "GetConversationItems", "accessibility": 
"public", @@ -132044,7 +132502,7 @@ "doc": "List all items for a conversation with the given ID.", "summary": "List items", "operation": { - "$id": "9732", + "$id": "9754", "name": "GetConversationItems", "resourceName": "OpenAI", "summary": "List items", @@ -132052,13 +132510,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9733", + "$id": "9755", "kind": "path", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to list items for.", "type": { - "$id": "9734", + "$id": "9756", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132076,13 +132534,13 @@ "crossLanguageDefinitionId": "OpenAI.listConversationItems.conversation_id" }, { - "$id": "9735", + "$id": "9757", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between\n1 and 100, and the default is 20.", "type": { - "$id": "9736", + "$id": "9758", "kind": "integer", "name": "integer", "crossLanguageDefinitionId": "TypeSpec.integer", @@ -132097,18 +132555,18 @@ "readOnly": false }, { - "$id": "9737", + "$id": "9759", "kind": "query", "name": "order", "serializedName": "order", "doc": "The order to return the input items in. 
Default is `desc`.\n- `asc`: Return the input items in ascending order.\n- `desc`: Return the input items in descending order.", "type": { - "$id": "9738", + "$id": "9760", "kind": "enum", "name": "ListConversationItemsRequestOrder", "crossLanguageDefinitionId": "OpenAI.listConversationItems.RequestOrder.anonymous", "valueType": { - "$id": "9739", + "$id": "9761", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132116,28 +132574,28 @@ }, "values": [ { - "$id": "9740", + "$id": "9762", "kind": "enumvalue", "name": "asc", "value": "asc", "valueType": { - "$ref": "9739" + "$ref": "9761" }, "enumType": { - "$ref": "9738" + "$ref": "9760" }, "decorators": [] }, { - "$id": "9741", + "$id": "9763", "kind": "enumvalue", "name": "desc", "value": "desc", "valueType": { - "$ref": "9739" + "$ref": "9761" }, "enumType": { - "$ref": "9738" + "$ref": "9760" }, "decorators": [] } @@ -132157,13 +132615,13 @@ "readOnly": false }, { - "$id": "9742", + "$id": "9764", "kind": "query", "name": "after", "serializedName": "after", "doc": "An item ID to list items after, used in pagination.", "type": { - "$id": "9743", + "$id": "9765", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132178,22 +132636,22 @@ "readOnly": false }, { - "$id": "9744", + "$id": "9766", "kind": "query", "name": "include", "serializedName": "include", "doc": "Specify additional output data to include in the model response. 
Currently supported values are:\n- `web_search_call.action.sources`: Include the sources of the web search tool call.\n- `code_interpreter_call.outputs`: Includes the outputs of python code execution in code interpreter tool call items.\n- `computer_call_output.output.image_url`: Include image urls from the computer call output.\n- `file_search_call.results`: Include the search results of the file search tool call.\n- `message.input_image.image_url`: Include image urls from the input message.\n- `message.output_text.logprobs`: Include logprobs with assistant messages.\n- `reasoning.encrypted_content`: Includes an encrypted version of reasoning tokens in reasoning item outputs. This enables reasoning items to be used in multi-turn conversations when using the Responses API statelessly (like when the `store` parameter is set to `false`, or when an organization is enrolled in the zero data retention program).", "type": { - "$id": "9745", + "$id": "9767", "kind": "array", "name": "ArrayIncludeEnum", "valueType": { - "$id": "9746", + "$id": "9768", "kind": "enum", "name": "IncludeEnum", "crossLanguageDefinitionId": "OpenAI.IncludeEnum", "valueType": { - "$id": "9747", + "$id": "9769", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132201,106 +132659,106 @@ }, "values": [ { - "$id": "9748", + "$id": "9770", "kind": "enumvalue", "name": "file_search_call.results", "value": "file_search_call.results", "valueType": { - "$ref": "9747" + "$ref": "9769" }, "enumType": { - "$ref": "9746" + "$ref": "9768" }, "decorators": [] }, { - "$id": "9749", + "$id": "9771", "kind": "enumvalue", "name": "web_search_call.results", "value": "web_search_call.results", "valueType": { - "$ref": "9747" + "$ref": "9769" }, "enumType": { - "$ref": "9746" + "$ref": "9768" }, "decorators": [] }, { - "$id": "9750", + "$id": "9772", "kind": "enumvalue", "name": "web_search_call.action.sources", "value": "web_search_call.action.sources", "valueType": { - "$ref": 
"9747" + "$ref": "9769" }, "enumType": { - "$ref": "9746" + "$ref": "9768" }, "decorators": [] }, { - "$id": "9751", + "$id": "9773", "kind": "enumvalue", "name": "message.input_image.image_url", "value": "message.input_image.image_url", "valueType": { - "$ref": "9747" + "$ref": "9769" }, "enumType": { - "$ref": "9746" + "$ref": "9768" }, "decorators": [] }, { - "$id": "9752", + "$id": "9774", "kind": "enumvalue", "name": "computer_call_output.output.image_url", "value": "computer_call_output.output.image_url", "valueType": { - "$ref": "9747" + "$ref": "9769" }, "enumType": { - "$ref": "9746" + "$ref": "9768" }, "decorators": [] }, { - "$id": "9753", + "$id": "9775", "kind": "enumvalue", "name": "code_interpreter_call.outputs", "value": "code_interpreter_call.outputs", "valueType": { - "$ref": "9747" + "$ref": "9769" }, "enumType": { - "$ref": "9746" + "$ref": "9768" }, "decorators": [] }, { - "$id": "9754", + "$id": "9776", "kind": "enumvalue", "name": "reasoning.encrypted_content", "value": "reasoning.encrypted_content", "valueType": { - "$ref": "9747" + "$ref": "9769" }, "enumType": { - "$ref": "9746" + "$ref": "9768" }, "decorators": [] }, { - "$id": "9755", + "$id": "9777", "kind": "enumvalue", "name": "message.output_text.logprobs", "value": "message.output_text.logprobs", "valueType": { - "$ref": "9747" + "$ref": "9769" }, "enumType": { - "$ref": "9746" + "$ref": "9768" }, "decorators": [] } @@ -132324,7 +132782,7 @@ "readOnly": false }, { - "$id": "9756", + "$id": "9778", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -132346,7 +132804,7 @@ 200 ], "bodyType": { - "$id": "9757", + "$id": "9779", "kind": "model", "name": "ConversationItemList", "namespace": "OpenAI", @@ -132357,7 +132815,7 @@ "decorators": [], "properties": [ { - "$id": "9758", + "$id": "9780", "kind": "property", "name": "object", "doc": "The type of object returned, must be `list`.", @@ -132374,7 +132832,7 @@ "isHttpMetadata": false }, { - "$id": "9759", + "$id": "9781", 
"kind": "property", "name": "data", "doc": "A list of conversation items.", @@ -132391,12 +132849,12 @@ "isHttpMetadata": false }, { - "$id": "9760", + "$id": "9782", "kind": "property", "name": "has_more", "doc": "Whether there are more items available.", "type": { - "$id": "9761", + "$id": "9783", "kind": "boolean", "name": "boolean", "crossLanguageDefinitionId": "TypeSpec.boolean", @@ -132412,12 +132870,12 @@ "isHttpMetadata": false }, { - "$id": "9762", + "$id": "9784", "kind": "property", "name": "first_id", "doc": "The ID of the first item in the list.", "type": { - "$id": "9763", + "$id": "9785", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132433,12 +132891,12 @@ "isHttpMetadata": false }, { - "$id": "9764", + "$id": "9786", "kind": "property", "name": "last_id", "doc": "The ID of the last item in the list.", "type": { - "$id": "9765", + "$id": "9787", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132473,13 +132931,13 @@ }, "parameters": [ { - "$id": "9766", + "$id": "9788", "kind": "method", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to list items for.", "type": { - "$id": "9767", + "$id": "9789", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132495,13 +132953,13 @@ "decorators": [] }, { - "$id": "9768", + "$id": "9790", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "A limit on the number of objects to be returned. Limit can range between\n1 and 100, and the default is 20.", "type": { - "$id": "9769", + "$id": "9791", "kind": "integer", "name": "integer", "crossLanguageDefinitionId": "TypeSpec.integer", @@ -132517,13 +132975,13 @@ "decorators": [] }, { - "$id": "9770", + "$id": "9792", "kind": "method", "name": "order", "serializedName": "order", "doc": "The order to return the input items in. 
Default is `desc`.\n- `asc`: Return the input items in ascending order.\n- `desc`: Return the input items in descending order.", "type": { - "$ref": "9738" + "$ref": "9760" }, "location": "Query", "isApiVersion": false, @@ -132535,13 +132993,13 @@ "decorators": [] }, { - "$id": "9771", + "$id": "9793", "kind": "method", "name": "after", "serializedName": "after", "doc": "An item ID to list items after, used in pagination.", "type": { - "$id": "9772", + "$id": "9794", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132557,13 +133015,13 @@ "decorators": [] }, { - "$id": "9773", + "$id": "9795", "kind": "method", "name": "include", "serializedName": "include", "doc": "Specify additional output data to include in the model response. Currently supported values are:\n- `web_search_call.action.sources`: Include the sources of the web search tool call.\n- `code_interpreter_call.outputs`: Includes the outputs of python code execution in code interpreter tool call items.\n- `computer_call_output.output.image_url`: Include image urls from the computer call output.\n- `file_search_call.results`: Include the search results of the file search tool call.\n- `message.input_image.image_url`: Include image urls from the input message.\n- `message.output_text.logprobs`: Include logprobs with assistant messages.\n- `reasoning.encrypted_content`: Includes an encrypted version of reasoning tokens in reasoning item outputs. 
This enables reasoning items to be used in multi-turn conversations when using the Responses API statelessly (like when the `store` parameter is set to `false`, or when an organization is enrolled in the zero data retention program).", "type": { - "$ref": "9745" + "$ref": "9767" }, "location": "Query", "isApiVersion": false, @@ -132575,7 +133033,7 @@ "decorators": [] }, { - "$id": "9774", + "$id": "9796", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -132611,7 +133069,7 @@ } }, { - "$id": "9775", + "$id": "9797", "kind": "basic", "name": "createConversationItems", "accessibility": "public", @@ -132619,7 +133077,7 @@ "doc": "Create items in a conversation with the given ID.", "summary": "Create items", "operation": { - "$id": "9776", + "$id": "9798", "name": "createConversationItems", "resourceName": "OpenAI", "summary": "Create items", @@ -132627,13 +133085,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9777", + "$id": "9799", "kind": "path", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to add the item to.", "type": { - "$id": "9778", + "$id": "9800", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132651,13 +133109,13 @@ "crossLanguageDefinitionId": "OpenAI.createConversationItems.conversation_id" }, { - "$id": "9779", + "$id": "9801", "kind": "query", "name": "include", "serializedName": "include", "doc": "Additional fields to include in the response. 
See the `include`\nparameter for [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) for more information.", "type": { - "$ref": "9745" + "$ref": "9767" }, "isApiVersion": false, "explode": true, @@ -132668,7 +133126,7 @@ "readOnly": false }, { - "$id": "9780", + "$id": "9802", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -132685,7 +133143,7 @@ "crossLanguageDefinitionId": "OpenAI.createConversationItems.contentType" }, { - "$id": "9781", + "$id": "9803", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -132701,12 +133159,12 @@ "crossLanguageDefinitionId": "OpenAI.createConversationItems.accept" }, { - "$id": "9782", + "$id": "9804", "kind": "body", "name": "body", "serializedName": "body", "type": { - "$id": "9783", + "$id": "9805", "kind": "model", "name": "CreateConversationItemsParametersBody", "namespace": "OpenAI", @@ -132715,7 +133173,7 @@ "decorators": [], "properties": [ { - "$id": "9784", + "$id": "9806", "kind": "property", "name": "items", "type": { @@ -132750,7 +133208,7 @@ 200 ], "bodyType": { - "$ref": "9757" + "$ref": "9779" }, "headers": [], "isErrorResponse": false, @@ -132773,13 +133231,13 @@ }, "parameters": [ { - "$id": "9785", + "$id": "9807", "kind": "method", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to add the item to.", "type": { - "$id": "9786", + "$id": "9808", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132795,13 +133253,13 @@ "decorators": [] }, { - "$id": "9787", + "$id": "9809", "kind": "method", "name": "include", "serializedName": "include", "doc": "Additional fields to include in the response. 
See the `include`\nparameter for [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) for more information.", "type": { - "$ref": "9745" + "$ref": "9767" }, "location": "Query", "isApiVersion": false, @@ -132813,12 +133271,12 @@ "decorators": [] }, { - "$id": "9788", + "$id": "9810", "kind": "method", "name": "body", "serializedName": "body", "type": { - "$ref": "9783" + "$ref": "9805" }, "location": "Body", "isApiVersion": false, @@ -132830,7 +133288,7 @@ "decorators": [] }, { - "$id": "9789", + "$id": "9811", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -132848,7 +133306,7 @@ "decorators": [] }, { - "$id": "9790", + "$id": "9812", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -132867,7 +133325,7 @@ ], "response": { "type": { - "$ref": "9757" + "$ref": "9779" } }, "isOverride": false, @@ -132876,7 +133334,7 @@ "crossLanguageDefinitionId": "OpenAI.createConversationItems" }, { - "$id": "9791", + "$id": "9813", "kind": "basic", "name": "deleteConversationItem", "accessibility": "public", @@ -132884,7 +133342,7 @@ "doc": "Delete an item from a conversation with the given IDs.", "summary": "Delete an item", "operation": { - "$id": "9792", + "$id": "9814", "name": "deleteConversationItem", "resourceName": "OpenAI", "summary": "Delete an item", @@ -132892,13 +133350,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9793", + "$id": "9815", "kind": "path", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation that contains the item.", "type": { - "$id": "9794", + "$id": "9816", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132916,13 +133374,13 @@ "crossLanguageDefinitionId": "OpenAI.deleteConversationItem.conversation_id" }, { - "$id": "9795", + "$id": "9817", "kind": "path", "name": "item_id", "serializedName": "item_id", "doc": "The 
ID of the item to delete.", "type": { - "$id": "9796", + "$id": "9818", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132940,7 +133398,7 @@ "crossLanguageDefinitionId": "OpenAI.deleteConversationItem.item_id" }, { - "$id": "9797", + "$id": "9819", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -132962,7 +133420,7 @@ 200 ], "bodyType": { - "$id": "9798", + "$id": "9820", "kind": "model", "name": "ConversationResource", "namespace": "OpenAI", @@ -132971,12 +133429,12 @@ "decorators": [], "properties": [ { - "$id": "9799", + "$id": "9821", "kind": "property", "name": "id", "doc": "The unique ID of the conversation.", "type": { - "$id": "9800", + "$id": "9822", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -132992,7 +133450,7 @@ "isHttpMetadata": false }, { - "$id": "9801", + "$id": "9823", "kind": "property", "name": "object", "doc": "The object type, which is always `conversation`.", @@ -133009,12 +133467,12 @@ "isHttpMetadata": false }, { - "$id": "9802", + "$id": "9824", "kind": "property", "name": "metadata", "doc": "Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard.\n Keys are strings with a maximum length of 64 characters. 
Values are strings with a maximum length of 512 characters.", "type": { - "$id": "9803", + "$id": "9825", "kind": "unknown", "name": "unknown", "crossLanguageDefinitionId": "", @@ -133030,12 +133488,12 @@ "isHttpMetadata": false }, { - "$id": "9804", + "$id": "9826", "kind": "property", "name": "created_at", "doc": "The time at which the conversation was created, measured in seconds since the Unix epoch.", "type": { - "$id": "9805", + "$id": "9827", "kind": "integer", "name": "integer", "crossLanguageDefinitionId": "TypeSpec.integer", @@ -133070,13 +133528,13 @@ }, "parameters": [ { - "$id": "9806", + "$id": "9828", "kind": "method", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation that contains the item.", "type": { - "$id": "9807", + "$id": "9829", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133092,13 +133550,13 @@ "decorators": [] }, { - "$id": "9808", + "$id": "9830", "kind": "method", "name": "item_id", "serializedName": "item_id", "doc": "The ID of the item to delete.", "type": { - "$id": "9809", + "$id": "9831", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133114,7 +133572,7 @@ "decorators": [] }, { - "$id": "9810", + "$id": "9832", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -133133,7 +133591,7 @@ ], "response": { "type": { - "$ref": "9798" + "$ref": "9820" } }, "isOverride": false, @@ -133142,7 +133600,7 @@ "crossLanguageDefinitionId": "OpenAI.deleteConversationItem" }, { - "$id": "9811", + "$id": "9833", "kind": "basic", "name": "getConversationItem", "accessibility": "public", @@ -133150,7 +133608,7 @@ "doc": "Get a single item from a conversation with the given IDs.", "summary": "Retrieve an item", "operation": { - "$id": "9812", + "$id": "9834", "name": "getConversationItem", "resourceName": "OpenAI", "summary": "Retrieve an item", @@ -133158,13 +133616,13 @@ "accessibility": "public", 
"parameters": [ { - "$id": "9813", + "$id": "9835", "kind": "path", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation that contains the item.", "type": { - "$id": "9814", + "$id": "9836", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133182,13 +133640,13 @@ "crossLanguageDefinitionId": "OpenAI.getConversationItem.conversation_id" }, { - "$id": "9815", + "$id": "9837", "kind": "path", "name": "item_id", "serializedName": "item_id", "doc": "The ID of the item to retrieve.", "type": { - "$id": "9816", + "$id": "9838", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133206,13 +133664,13 @@ "crossLanguageDefinitionId": "OpenAI.getConversationItem.item_id" }, { - "$id": "9817", + "$id": "9839", "kind": "query", "name": "include", "serializedName": "include", "doc": "Additional fields to include in the response. See the `include`\nparameter for [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) for more information.", "type": { - "$ref": "9745" + "$ref": "9767" }, "isApiVersion": false, "explode": true, @@ -133223,7 +133681,7 @@ "readOnly": false }, { - "$id": "9818", + "$id": "9840", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -133265,13 +133723,13 @@ }, "parameters": [ { - "$id": "9819", + "$id": "9841", "kind": "method", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation that contains the item.", "type": { - "$id": "9820", + "$id": "9842", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133287,13 +133745,13 @@ "decorators": [] }, { - "$id": "9821", + "$id": "9843", "kind": "method", "name": "item_id", "serializedName": "item_id", "doc": "The ID of the item to retrieve.", "type": { - "$id": "9822", + "$id": "9844", "kind": "string", "name": 
"string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133309,13 +133767,13 @@ "decorators": [] }, { - "$id": "9823", + "$id": "9845", "kind": "method", "name": "include", "serializedName": "include", "doc": "Additional fields to include in the response. See the `include`\nparameter for [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) for more information.", "type": { - "$ref": "9745" + "$ref": "9767" }, "location": "Query", "isApiVersion": false, @@ -133327,7 +133785,7 @@ "decorators": [] }, { - "$id": "9824", + "$id": "9846", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -133355,7 +133813,7 @@ "crossLanguageDefinitionId": "OpenAI.getConversationItem" }, { - "$id": "9825", + "$id": "9847", "kind": "basic", "name": "createConversation", "accessibility": "public", @@ -133363,7 +133821,7 @@ "doc": "Create a conversation.", "summary": "Create a conversation", "operation": { - "$id": "9826", + "$id": "9848", "name": "createConversation", "resourceName": "OpenAI", "summary": "Create a conversation", @@ -133371,7 +133829,7 @@ "accessibility": "public", "parameters": [ { - "$id": "9827", + "$id": "9849", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -133388,7 +133846,7 @@ "crossLanguageDefinitionId": "OpenAI.createConversation.contentType" }, { - "$id": "9828", + "$id": "9850", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -133404,12 +133862,12 @@ "crossLanguageDefinitionId": "OpenAI.createConversation.accept" }, { - "$id": "9829", + "$id": "9851", "kind": "body", "name": "body", "serializedName": "body", "type": { - "$id": "9830", + "$id": "9852", "kind": "model", "name": "CreateConversationBody", "namespace": "OpenAI", @@ -133418,14 +133876,14 @@ "decorators": [], "properties": [ { - "$id": "9831", + "$id": "9853", "kind": "property", "name": "metadata", "type": { - "$id": "9832", + "$id": "9854", 
"kind": "nullable", "type": { - "$id": "9833", + "$id": "9855", "kind": "model", "name": "Metadata", "namespace": "OpenAI", @@ -133447,11 +133905,11 @@ "isHttpMetadata": false }, { - "$id": "9834", + "$id": "9856", "kind": "property", "name": "items", "type": { - "$id": "9835", + "$id": "9857", "kind": "nullable", "type": { "$ref": "5206" @@ -133487,7 +133945,7 @@ 200 ], "bodyType": { - "$ref": "9798" + "$ref": "9820" }, "headers": [], "isErrorResponse": false, @@ -133510,12 +133968,12 @@ }, "parameters": [ { - "$id": "9836", + "$id": "9858", "kind": "method", "name": "body", "serializedName": "body", "type": { - "$ref": "9830" + "$ref": "9852" }, "location": "Body", "isApiVersion": false, @@ -133527,7 +133985,7 @@ "decorators": [] }, { - "$id": "9837", + "$id": "9859", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -133545,7 +134003,7 @@ "decorators": [] }, { - "$id": "9838", + "$id": "9860", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -133564,7 +134022,7 @@ ], "response": { "type": { - "$ref": "9798" + "$ref": "9820" } }, "isOverride": false, @@ -133573,7 +134031,7 @@ "crossLanguageDefinitionId": "OpenAI.createConversation" }, { - "$id": "9839", + "$id": "9861", "kind": "basic", "name": "deleteConversation", "accessibility": "public", @@ -133581,7 +134039,7 @@ "doc": "Delete a conversation. 
Items in the conversation will not be deleted.", "summary": "Delete a conversation", "operation": { - "$id": "9840", + "$id": "9862", "name": "deleteConversation", "resourceName": "OpenAI", "summary": "Delete a conversation", @@ -133589,13 +134047,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9841", + "$id": "9863", "kind": "path", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to delete.", "type": { - "$id": "9842", + "$id": "9864", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133613,7 +134071,7 @@ "crossLanguageDefinitionId": "OpenAI.deleteConversation.conversation_id" }, { - "$id": "9843", + "$id": "9865", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -133635,7 +134093,7 @@ 200 ], "bodyType": { - "$id": "9844", + "$id": "9866", "kind": "model", "name": "DeletedConversationResource", "namespace": "OpenAI", @@ -133644,7 +134102,7 @@ "decorators": [], "properties": [ { - "$id": "9845", + "$id": "9867", "kind": "property", "name": "object", "type": { @@ -133660,11 +134118,11 @@ "isHttpMetadata": false }, { - "$id": "9846", + "$id": "9868", "kind": "property", "name": "deleted", "type": { - "$id": "9847", + "$id": "9869", "kind": "boolean", "name": "boolean", "crossLanguageDefinitionId": "TypeSpec.boolean", @@ -133680,11 +134138,11 @@ "isHttpMetadata": false }, { - "$id": "9848", + "$id": "9870", "kind": "property", "name": "id", "type": { - "$id": "9849", + "$id": "9871", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133719,13 +134177,13 @@ }, "parameters": [ { - "$id": "9850", + "$id": "9872", "kind": "method", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to delete.", "type": { - "$id": "9851", + "$id": "9873", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133741,7 +134199,7 @@ "decorators": [] }, { - 
"$id": "9852", + "$id": "9874", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -133760,7 +134218,7 @@ ], "response": { "type": { - "$ref": "9844" + "$ref": "9866" } }, "isOverride": false, @@ -133769,7 +134227,7 @@ "crossLanguageDefinitionId": "OpenAI.deleteConversation" }, { - "$id": "9853", + "$id": "9875", "kind": "basic", "name": "getConversation", "accessibility": "public", @@ -133777,7 +134235,7 @@ "doc": "Get a conversation", "summary": "Retrieve a conversation", "operation": { - "$id": "9854", + "$id": "9876", "name": "getConversation", "resourceName": "OpenAI", "summary": "Retrieve a conversation", @@ -133785,13 +134243,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9855", + "$id": "9877", "kind": "path", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to retrieve.", "type": { - "$id": "9856", + "$id": "9878", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133809,7 +134267,7 @@ "crossLanguageDefinitionId": "OpenAI.getConversation.conversation_id" }, { - "$id": "9857", + "$id": "9879", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -133831,7 +134289,7 @@ 200 ], "bodyType": { - "$ref": "9798" + "$ref": "9820" }, "headers": [], "isErrorResponse": false, @@ -133851,13 +134309,13 @@ }, "parameters": [ { - "$id": "9858", + "$id": "9880", "kind": "method", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to retrieve.", "type": { - "$id": "9859", + "$id": "9881", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133873,7 +134331,7 @@ "decorators": [] }, { - "$id": "9860", + "$id": "9882", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -133892,7 +134350,7 @@ ], "response": { "type": { - "$ref": "9798" + "$ref": "9820" } }, "isOverride": false, @@ -133901,7 +134359,7 @@ "crossLanguageDefinitionId": 
"OpenAI.getConversation" }, { - "$id": "9861", + "$id": "9883", "kind": "basic", "name": "updateConversation", "accessibility": "public", @@ -133909,7 +134367,7 @@ "doc": "Update a conversation", "summary": "Update a conversation", "operation": { - "$id": "9862", + "$id": "9884", "name": "updateConversation", "resourceName": "OpenAI", "summary": "Update a conversation", @@ -133917,13 +134375,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9863", + "$id": "9885", "kind": "path", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to update.", "type": { - "$id": "9864", + "$id": "9886", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -133941,7 +134399,7 @@ "crossLanguageDefinitionId": "OpenAI.updateConversation.conversation_id" }, { - "$id": "9865", + "$id": "9887", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -133958,7 +134416,7 @@ "crossLanguageDefinitionId": "OpenAI.updateConversation.contentType" }, { - "$id": "9866", + "$id": "9888", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -133974,12 +134432,12 @@ "crossLanguageDefinitionId": "OpenAI.updateConversation.accept" }, { - "$id": "9867", + "$id": "9889", "kind": "body", "name": "body", "serializedName": "body", "type": { - "$id": "9868", + "$id": "9890", "kind": "model", "name": "UpdateConversationBody", "namespace": "OpenAI", @@ -133988,15 +134446,15 @@ "decorators": [], "properties": [ { - "$id": "9869", + "$id": "9891", "kind": "property", "name": "metadata", "doc": "Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard.\n Keys are strings with a maximum length of 64 characters. 
Values are strings with a maximum length of 512 characters.", "type": { - "$id": "9870", + "$id": "9892", "kind": "nullable", "type": { - "$ref": "9833" + "$ref": "9855" }, "namespace": "OpenAI" }, @@ -134029,7 +134487,7 @@ 200 ], "bodyType": { - "$ref": "9798" + "$ref": "9820" }, "headers": [], "isErrorResponse": false, @@ -134052,13 +134510,13 @@ }, "parameters": [ { - "$id": "9871", + "$id": "9893", "kind": "method", "name": "conversation_id", "serializedName": "conversation_id", "doc": "The ID of the conversation to update.", "type": { - "$id": "9872", + "$id": "9894", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134074,12 +134532,12 @@ "decorators": [] }, { - "$id": "9873", + "$id": "9895", "kind": "method", "name": "body", "serializedName": "body", "type": { - "$ref": "9868" + "$ref": "9890" }, "location": "Body", "isApiVersion": false, @@ -134091,7 +134549,7 @@ "decorators": [] }, { - "$id": "9874", + "$id": "9896", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -134109,7 +134567,7 @@ "decorators": [] }, { - "$id": "9875", + "$id": "9897", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -134128,7 +134586,7 @@ ], "response": { "type": { - "$ref": "9798" + "$ref": "9820" } }, "isOverride": false, @@ -134139,13 +134597,13 @@ ], "parameters": [ { - "$id": "9876", + "$id": "9898", "kind": "endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "9877", + "$id": "9899", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -134156,7 +134614,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "9878", + "$id": "9900", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -134174,17 +134632,17 @@ "crossLanguageDefinitionId": "OpenAI", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } }, { - "$id": "9879", + "$id": "9901", "kind": "client", 
"name": "Videos", "namespace": "OpenAI", "methods": [ { - "$id": "9880", + "$id": "9902", "kind": "paging", "name": "ListVideos", "accessibility": "public", @@ -134192,7 +134650,7 @@ "doc": "List videos", "summary": "List videos", "operation": { - "$id": "9881", + "$id": "9903", "name": "ListVideos", "resourceName": "OpenAI", "summary": "List videos", @@ -134200,13 +134658,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9882", + "$id": "9904", "kind": "query", "name": "limit", "serializedName": "limit", "doc": "Number of items to retrieve", "type": { - "$id": "9883", + "$id": "9905", "kind": "integer", "name": "integer", "crossLanguageDefinitionId": "TypeSpec.integer", @@ -134221,18 +134679,18 @@ "readOnly": false }, { - "$id": "9884", + "$id": "9906", "kind": "query", "name": "order", "serializedName": "order", "doc": "Sort order of results by timestamp. Use `asc` for ascending order or `desc` for descending order.", "type": { - "$id": "9885", + "$id": "9907", "kind": "enum", "name": "OrderEnum", "crossLanguageDefinitionId": "OpenAI.OrderEnum", "valueType": { - "$id": "9886", + "$id": "9908", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134240,28 +134698,28 @@ }, "values": [ { - "$id": "9887", + "$id": "9909", "kind": "enumvalue", "name": "asc", "value": "asc", "valueType": { - "$ref": "9886" + "$ref": "9908" }, "enumType": { - "$ref": "9885" + "$ref": "9907" }, "decorators": [] }, { - "$id": "9888", + "$id": "9910", "kind": "enumvalue", "name": "desc", "value": "desc", "valueType": { - "$ref": "9886" + "$ref": "9908" }, "enumType": { - "$ref": "9885" + "$ref": "9907" }, "decorators": [] } @@ -134281,13 +134739,13 @@ "readOnly": false }, { - "$id": "9889", + "$id": "9911", "kind": "query", "name": "after", "serializedName": "after", "doc": "Identifier for the last item from the previous pagination request", "type": { - "$id": "9890", + "$id": "9912", "kind": "string", "name": "string", 
"crossLanguageDefinitionId": "TypeSpec.string", @@ -134302,7 +134760,7 @@ "readOnly": false }, { - "$id": "9891", + "$id": "9913", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -134324,7 +134782,7 @@ 200 ], "bodyType": { - "$id": "9892", + "$id": "9914", "kind": "model", "name": "VideoListResource", "namespace": "OpenAI", @@ -134333,7 +134791,7 @@ "decorators": [], "properties": [ { - "$id": "9893", + "$id": "9915", "kind": "property", "name": "object", "doc": "The type of object returned, must be `list`.", @@ -134350,16 +134808,16 @@ "isHttpMetadata": false }, { - "$id": "9894", + "$id": "9916", "kind": "property", "name": "data", "doc": "A list of items", "type": { - "$id": "9895", + "$id": "9917", "kind": "array", "name": "ArrayVideoResource", "valueType": { - "$id": "9896", + "$id": "9918", "kind": "model", "name": "VideoResource", "namespace": "OpenAI", @@ -134370,12 +134828,12 @@ "decorators": [], "properties": [ { - "$id": "9897", + "$id": "9919", "kind": "property", "name": "id", "doc": "Unique identifier for the video job.", "type": { - "$id": "9898", + "$id": "9920", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134391,7 +134849,7 @@ "isHttpMetadata": false }, { - "$id": "9899", + "$id": "9921", "kind": "property", "name": "object", "doc": "The object type, which is always `video`.", @@ -134408,17 +134866,17 @@ "isHttpMetadata": false }, { - "$id": "9900", + "$id": "9922", "kind": "property", "name": "model", "doc": "The video generation model that produced the job.", "type": { - "$id": "9901", + "$id": "9923", "kind": "enum", "name": "VideoModel", "crossLanguageDefinitionId": "OpenAI.VideoModel", "valueType": { - "$id": "9902", + "$id": "9924", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134426,28 +134884,28 @@ }, "values": [ { - "$id": "9903", + "$id": "9925", "kind": "enumvalue", "name": "sora-2", "value": "sora-2", "valueType": { - "$ref": "9902" 
+ "$ref": "9924" }, "enumType": { - "$ref": "9901" + "$ref": "9923" }, "decorators": [] }, { - "$id": "9904", + "$id": "9926", "kind": "enumvalue", "name": "sora-2-pro", "value": "sora-2-pro", "valueType": { - "$ref": "9902" + "$ref": "9924" }, "enumType": { - "$ref": "9901" + "$ref": "9923" }, "decorators": [] } @@ -134468,17 +134926,17 @@ "isHttpMetadata": false }, { - "$id": "9905", + "$id": "9927", "kind": "property", "name": "status", "doc": "Current lifecycle status of the video job.", "type": { - "$id": "9906", + "$id": "9928", "kind": "enum", "name": "VideoStatus", "crossLanguageDefinitionId": "OpenAI.VideoStatus", "valueType": { - "$id": "9907", + "$id": "9929", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134486,54 +134944,54 @@ }, "values": [ { - "$id": "9908", + "$id": "9930", "kind": "enumvalue", "name": "queued", "value": "queued", "valueType": { - "$ref": "9907" + "$ref": "9929" }, "enumType": { - "$ref": "9906" + "$ref": "9928" }, "decorators": [] }, { - "$id": "9909", + "$id": "9931", "kind": "enumvalue", "name": "in_progress", "value": "in_progress", "valueType": { - "$ref": "9907" + "$ref": "9929" }, "enumType": { - "$ref": "9906" + "$ref": "9928" }, "decorators": [] }, { - "$id": "9910", + "$id": "9932", "kind": "enumvalue", "name": "completed", "value": "completed", "valueType": { - "$ref": "9907" + "$ref": "9929" }, "enumType": { - "$ref": "9906" + "$ref": "9928" }, "decorators": [] }, { - "$id": "9911", + "$id": "9933", "kind": "enumvalue", "name": "failed", "value": "failed", "valueType": { - "$ref": "9907" + "$ref": "9929" }, "enumType": { - "$ref": "9906" + "$ref": "9928" }, "decorators": [] } @@ -134554,12 +135012,12 @@ "isHttpMetadata": false }, { - "$id": "9912", + "$id": "9934", "kind": "property", "name": "progress", "doc": "Approximate completion percentage for the generation task.", "type": { - "$id": "9913", + "$id": "9935", "kind": "integer", "name": "integer", 
"crossLanguageDefinitionId": "TypeSpec.integer", @@ -134575,12 +135033,12 @@ "isHttpMetadata": false }, { - "$id": "9914", + "$id": "9936", "kind": "property", "name": "created_at", "doc": "Unix timestamp (seconds) for when the job was created.", "type": { - "$id": "9915", + "$id": "9937", "kind": "integer", "name": "integer", "crossLanguageDefinitionId": "TypeSpec.integer", @@ -134596,14 +135054,14 @@ "isHttpMetadata": false }, { - "$id": "9916", + "$id": "9938", "kind": "property", "name": "completed_at", "type": { - "$id": "9917", + "$id": "9939", "kind": "nullable", "type": { - "$id": "9918", + "$id": "9940", "kind": "integer", "name": "integer", "crossLanguageDefinitionId": "TypeSpec.integer", @@ -134621,14 +135079,14 @@ "isHttpMetadata": false }, { - "$id": "9919", + "$id": "9941", "kind": "property", "name": "expires_at", "type": { - "$id": "9920", + "$id": "9942", "kind": "nullable", "type": { - "$id": "9921", + "$id": "9943", "kind": "integer", "name": "integer", "crossLanguageDefinitionId": "TypeSpec.integer", @@ -134646,17 +135104,17 @@ "isHttpMetadata": false }, { - "$id": "9922", + "$id": "9944", "kind": "property", "name": "size", "doc": "The resolution of the generated video.", "type": { - "$id": "9923", + "$id": "9945", "kind": "enum", "name": "VideoSize", "crossLanguageDefinitionId": "OpenAI.VideoSize", "valueType": { - "$id": "9924", + "$id": "9946", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134664,54 +135122,54 @@ }, "values": [ { - "$id": "9925", + "$id": "9947", "kind": "enumvalue", "name": "720x1280", "value": "720x1280", "valueType": { - "$ref": "9924" + "$ref": "9946" }, "enumType": { - "$ref": "9923" + "$ref": "9945" }, "decorators": [] }, { - "$id": "9926", + "$id": "9948", "kind": "enumvalue", "name": "1280x720", "value": "1280x720", "valueType": { - "$ref": "9924" + "$ref": "9946" }, "enumType": { - "$ref": "9923" + "$ref": "9945" }, "decorators": [] }, { - "$id": "9927", + "$id": "9949", 
"kind": "enumvalue", "name": "1024x1792", "value": "1024x1792", "valueType": { - "$ref": "9924" + "$ref": "9946" }, "enumType": { - "$ref": "9923" + "$ref": "9945" }, "decorators": [] }, { - "$id": "9928", + "$id": "9950", "kind": "enumvalue", "name": "1792x1024", "value": "1792x1024", "valueType": { - "$ref": "9924" + "$ref": "9946" }, "enumType": { - "$ref": "9923" + "$ref": "9945" }, "decorators": [] } @@ -134732,17 +135190,17 @@ "isHttpMetadata": false }, { - "$id": "9929", + "$id": "9951", "kind": "property", "name": "seconds", "doc": "Duration of the generated clip in seconds.", "type": { - "$id": "9930", + "$id": "9952", "kind": "enum", "name": "VideoSeconds", "crossLanguageDefinitionId": "OpenAI.VideoSeconds", "valueType": { - "$id": "9931", + "$id": "9953", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134750,41 +135208,41 @@ }, "values": [ { - "$id": "9932", + "$id": "9954", "kind": "enumvalue", "name": "4", "value": "4", "valueType": { - "$ref": "9931" + "$ref": "9953" }, "enumType": { - "$ref": "9930" + "$ref": "9952" }, "decorators": [] }, { - "$id": "9933", + "$id": "9955", "kind": "enumvalue", "name": "8", "value": "8", "valueType": { - "$ref": "9931" + "$ref": "9953" }, "enumType": { - "$ref": "9930" + "$ref": "9952" }, "decorators": [] }, { - "$id": "9934", + "$id": "9956", "kind": "enumvalue", "name": "12", "value": "12", "valueType": { - "$ref": "9931" + "$ref": "9953" }, "enumType": { - "$ref": "9930" + "$ref": "9952" }, "decorators": [] } @@ -134805,14 +135263,14 @@ "isHttpMetadata": false }, { - "$id": "9935", + "$id": "9957", "kind": "property", "name": "remixed_from_video_id", "type": { - "$id": "9936", + "$id": "9958", "kind": "nullable", "type": { - "$id": "9937", + "$id": "9959", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134830,14 +135288,14 @@ "isHttpMetadata": false }, { - "$id": "9938", + "$id": "9960", "kind": "property", "name": "error", "type": { 
- "$id": "9939", + "$id": "9961", "kind": "nullable", "type": { - "$id": "9940", + "$id": "9962", "kind": "model", "name": "Error-2", "namespace": "OpenAI", @@ -134846,11 +135304,11 @@ "decorators": [], "properties": [ { - "$id": "9941", + "$id": "9963", "kind": "property", "name": "code", "type": { - "$id": "9942", + "$id": "9964", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134866,11 +135324,11 @@ "isHttpMetadata": false }, { - "$id": "9943", + "$id": "9965", "kind": "property", "name": "message", "type": { - "$id": "9944", + "$id": "9966", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134913,11 +135371,11 @@ "isHttpMetadata": false }, { - "$id": "9945", + "$id": "9967", "kind": "property", "name": "first_id", "type": { - "$id": "9946", + "$id": "9968", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134933,11 +135391,11 @@ "isHttpMetadata": false }, { - "$id": "9947", + "$id": "9969", "kind": "property", "name": "last_id", "type": { - "$id": "9948", + "$id": "9970", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -134953,12 +135411,12 @@ "isHttpMetadata": false }, { - "$id": "9949", + "$id": "9971", "kind": "property", "name": "has_more", "doc": "Whether there are more items available.", "type": { - "$id": "9950", + "$id": "9972", "kind": "boolean", "name": "boolean", "crossLanguageDefinitionId": "TypeSpec.boolean", @@ -134993,13 +135451,13 @@ }, "parameters": [ { - "$id": "9951", + "$id": "9973", "kind": "method", "name": "limit", "serializedName": "limit", "doc": "Number of items to retrieve", "type": { - "$id": "9952", + "$id": "9974", "kind": "integer", "name": "integer", "crossLanguageDefinitionId": "TypeSpec.integer", @@ -135015,13 +135473,13 @@ "decorators": [] }, { - "$id": "9953", + "$id": "9975", "kind": "method", "name": "order", "serializedName": "order", "doc": "Sort order of results by 
timestamp. Use `asc` for ascending order or `desc` for descending order.", "type": { - "$ref": "9885" + "$ref": "9907" }, "location": "Query", "isApiVersion": false, @@ -135033,13 +135491,13 @@ "decorators": [] }, { - "$id": "9954", + "$id": "9976", "kind": "method", "name": "after", "serializedName": "after", "doc": "Identifier for the last item from the previous pagination request", "type": { - "$id": "9955", + "$id": "9977", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135055,7 +135513,7 @@ "decorators": [] }, { - "$id": "9956", + "$id": "9978", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -135074,7 +135532,7 @@ ], "response": { "type": { - "$ref": "9895" + "$ref": "9917" }, "resultSegments": [ "data" @@ -135091,7 +135549,7 @@ } }, { - "$id": "9957", + "$id": "9979", "kind": "basic", "name": "createVideo", "accessibility": "public", @@ -135099,7 +135557,7 @@ "doc": "Create a video", "summary": "Create video", "operation": { - "$id": "9958", + "$id": "9980", "name": "createVideo", "resourceName": "OpenAI", "summary": "Create video", @@ -135107,7 +135565,7 @@ "accessibility": "public", "parameters": [ { - "$id": "9959", + "$id": "9981", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -135123,7 +135581,7 @@ "crossLanguageDefinitionId": "OpenAI.createVideo.contentType" }, { - "$id": "9960", + "$id": "9982", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -135139,12 +135597,12 @@ "crossLanguageDefinitionId": "OpenAI.createVideo.accept" }, { - "$id": "9961", + "$id": "9983", "kind": "body", "name": "body", "serializedName": "body", "type": { - "$id": "9962", + "$id": "9984", "kind": "model", "name": "CreateVideoBody", "namespace": "OpenAI", @@ -135155,13 +135613,13 @@ "decorators": [], "properties": [ { - "$id": "9963", + "$id": "9985", "kind": "property", "name": "model", "serializedName": "model", "doc": "The video generation model to use. 
Defaults to `sora-2`.", "type": { - "$ref": "9901" + "$ref": "9923" }, "optional": true, "readOnly": false, @@ -135183,18 +135641,18 @@ "isHttpMetadata": false }, { - "$id": "9964", + "$id": "9986", "kind": "property", "name": "prompt", "serializedName": "prompt", "doc": "Text prompt that describes the video to generate.", "type": { - "$id": "9965", + "$id": "9987", "kind": "string", "name": "VideoPrompt", "crossLanguageDefinitionId": "OpenAI.VideoPrompt", "baseType": { - "$id": "9966", + "$id": "9988", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135222,13 +135680,13 @@ "isHttpMetadata": false }, { - "$id": "9967", + "$id": "9989", "kind": "property", "name": "input_reference", "serializedName": "input_reference", "doc": "Optional image reference that guides generation.", "type": { - "$id": "9968", + "$id": "9990", "kind": "bytes", "name": "bytes", "encode": "base64", @@ -135255,13 +135713,13 @@ "isHttpMetadata": false }, { - "$id": "9969", + "$id": "9991", "kind": "property", "name": "seconds", "serializedName": "seconds", "doc": "Clip duration in seconds. Defaults to 4 seconds.", "type": { - "$ref": "9930" + "$ref": "9952" }, "optional": true, "readOnly": false, @@ -135283,13 +135741,13 @@ "isHttpMetadata": false }, { - "$id": "9970", + "$id": "9992", "kind": "property", "name": "size", "serializedName": "size", "doc": "Output resolution formatted as width x height. 
Defaults to 720x1280.", "type": { - "$ref": "9923" + "$ref": "9945" }, "optional": true, "readOnly": false, @@ -135330,7 +135788,7 @@ 200 ], "bodyType": { - "$ref": "9896" + "$ref": "9918" }, "headers": [], "isErrorResponse": false, @@ -135353,7 +135811,7 @@ }, "parameters": [ { - "$id": "9971", + "$id": "9993", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -135370,12 +135828,12 @@ "decorators": [] }, { - "$id": "9972", + "$id": "9994", "kind": "method", "name": "body", "serializedName": "body", "type": { - "$ref": "9962" + "$ref": "9984" }, "location": "Body", "isApiVersion": false, @@ -135387,7 +135845,7 @@ "decorators": [] }, { - "$id": "9973", + "$id": "9995", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -135406,7 +135864,7 @@ ], "response": { "type": { - "$ref": "9896" + "$ref": "9918" } }, "isOverride": false, @@ -135415,7 +135873,7 @@ "crossLanguageDefinitionId": "OpenAI.createVideo" }, { - "$id": "9974", + "$id": "9996", "kind": "basic", "name": "DeleteVideo", "accessibility": "public", @@ -135423,7 +135881,7 @@ "doc": "Delete a video", "summary": "Delete video", "operation": { - "$id": "9975", + "$id": "9997", "name": "DeleteVideo", "resourceName": "OpenAI", "summary": "Delete video", @@ -135431,13 +135889,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9976", + "$id": "9998", "kind": "path", "name": "video_id", "serializedName": "video_id", "doc": "The identifier of the video to delete.", "type": { - "$id": "9977", + "$id": "9999", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135455,7 +135913,7 @@ "crossLanguageDefinitionId": "OpenAI.DeleteVideo.video_id" }, { - "$id": "9978", + "$id": "10000", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -135477,7 +135935,7 @@ 200 ], "bodyType": { - "$id": "9979", + "$id": "10001", "kind": "model", "name": "DeletedVideoResource", "namespace": "OpenAI", @@ -135488,7 +135946,7 @@ "decorators": 
[], "properties": [ { - "$id": "9980", + "$id": "10002", "kind": "property", "name": "object", "doc": "The object type that signals the deletion response.", @@ -135505,12 +135963,12 @@ "isHttpMetadata": false }, { - "$id": "9981", + "$id": "10003", "kind": "property", "name": "deleted", "doc": "Indicates that the video resource was deleted.", "type": { - "$id": "9982", + "$id": "10004", "kind": "boolean", "name": "boolean", "crossLanguageDefinitionId": "TypeSpec.boolean", @@ -135526,12 +135984,12 @@ "isHttpMetadata": false }, { - "$id": "9983", + "$id": "10005", "kind": "property", "name": "id", "doc": "Identifier of the deleted video.", "type": { - "$id": "9984", + "$id": "10006", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135566,13 +136024,13 @@ }, "parameters": [ { - "$id": "9985", + "$id": "10007", "kind": "method", "name": "video_id", "serializedName": "video_id", "doc": "The identifier of the video to delete.", "type": { - "$id": "9986", + "$id": "10008", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135588,7 +136046,7 @@ "decorators": [] }, { - "$id": "9987", + "$id": "10009", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -135607,7 +136065,7 @@ ], "response": { "type": { - "$ref": "9979" + "$ref": "10001" } }, "isOverride": false, @@ -135616,7 +136074,7 @@ "crossLanguageDefinitionId": "OpenAI.DeleteVideo" }, { - "$id": "9988", + "$id": "10010", "kind": "basic", "name": "GetVideo", "accessibility": "public", @@ -135624,7 +136082,7 @@ "doc": "Retrieve a video", "summary": "Retrieve video", "operation": { - "$id": "9989", + "$id": "10011", "name": "GetVideo", "resourceName": "OpenAI", "summary": "Retrieve video", @@ -135632,13 +136090,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9990", + "$id": "10012", "kind": "path", "name": "video_id", "serializedName": "video_id", "doc": "The identifier of the video to retrieve.", "type": { - "$id": 
"9991", + "$id": "10013", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135656,7 +136114,7 @@ "crossLanguageDefinitionId": "OpenAI.GetVideo.video_id" }, { - "$id": "9992", + "$id": "10014", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -135678,7 +136136,7 @@ 200 ], "bodyType": { - "$ref": "9896" + "$ref": "9918" }, "headers": [], "isErrorResponse": false, @@ -135698,13 +136156,13 @@ }, "parameters": [ { - "$id": "9993", + "$id": "10015", "kind": "method", "name": "video_id", "serializedName": "video_id", "doc": "The identifier of the video to retrieve.", "type": { - "$id": "9994", + "$id": "10016", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135720,7 +136178,7 @@ "decorators": [] }, { - "$id": "9995", + "$id": "10017", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -135739,7 +136197,7 @@ ], "response": { "type": { - "$ref": "9896" + "$ref": "9918" } }, "isOverride": false, @@ -135748,7 +136206,7 @@ "crossLanguageDefinitionId": "OpenAI.GetVideo" }, { - "$id": "9996", + "$id": "10018", "kind": "basic", "name": "DownloadVideo", "accessibility": "public", @@ -135756,7 +136214,7 @@ "doc": "Download video content", "summary": "Retrieve video content", "operation": { - "$id": "9997", + "$id": "10019", "name": "DownloadVideo", "resourceName": "OpenAI", "summary": "Retrieve video content", @@ -135764,13 +136222,13 @@ "accessibility": "public", "parameters": [ { - "$id": "9998", + "$id": "10020", "kind": "path", "name": "video_id", "serializedName": "video_id", "doc": "The identifier of the video whose media to download.", "type": { - "$id": "9999", + "$id": "10021", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135788,18 +136246,18 @@ "crossLanguageDefinitionId": "OpenAI.RetrieveVideoContent.video_id" }, { - "$id": "10000", + "$id": "10022", "kind": "query", "name": "variant", "serializedName": "variant", 
"doc": "Which downloadable asset to return. Defaults to the MP4 video.", "type": { - "$id": "10001", + "$id": "10023", "kind": "enum", "name": "VideoContentVariant", "crossLanguageDefinitionId": "OpenAI.VideoContentVariant", "valueType": { - "$id": "10002", + "$id": "10024", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135807,41 +136265,41 @@ }, "values": [ { - "$id": "10003", + "$id": "10025", "kind": "enumvalue", "name": "video", "value": "video", "valueType": { - "$ref": "10002" + "$ref": "10024" }, "enumType": { - "$ref": "10001" + "$ref": "10023" }, "decorators": [] }, { - "$id": "10004", + "$id": "10026", "kind": "enumvalue", "name": "thumbnail", "value": "thumbnail", "valueType": { - "$ref": "10002" + "$ref": "10024" }, "enumType": { - "$ref": "10001" + "$ref": "10023" }, "decorators": [] }, { - "$id": "10005", + "$id": "10027", "kind": "enumvalue", "name": "spritesheet", "value": "spritesheet", "valueType": { - "$ref": "10002" + "$ref": "10024" }, "enumType": { - "$ref": "10001" + "$ref": "10023" }, "decorators": [] } @@ -135861,12 +136319,12 @@ "readOnly": false }, { - "$id": "10006", + "$id": "10028", "kind": "header", "name": "accept", "serializedName": "Accept", "type": { - "$id": "10007", + "$id": "10029", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135887,12 +136345,12 @@ 200 ], "bodyType": { - "$id": "10008", + "$id": "10030", "kind": "union", "name": "", "variantTypes": [ { - "$id": "10009", + "$id": "10031", "kind": "bytes", "name": "bytes", "encode": "base64", @@ -135900,7 +136358,7 @@ "decorators": [] }, { - "$id": "10010", + "$id": "10032", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135945,13 +136403,13 @@ }, "parameters": [ { - "$id": "10011", + "$id": "10033", "kind": "method", "name": "video_id", "serializedName": "video_id", "doc": "The identifier of the video whose media to download.", "type": { - "$id": 
"10012", + "$id": "10034", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -135967,13 +136425,13 @@ "decorators": [] }, { - "$id": "10013", + "$id": "10035", "kind": "method", "name": "variant", "serializedName": "variant", "doc": "Which downloadable asset to return. Defaults to the MP4 video.", "type": { - "$ref": "10001" + "$ref": "10023" }, "location": "Query", "isApiVersion": false, @@ -135985,12 +136443,12 @@ "decorators": [] }, { - "$id": "10014", + "$id": "10036", "kind": "method", "name": "accept", "serializedName": "Accept", "type": { - "$ref": "10007" + "$ref": "10029" }, "location": "Header", "isApiVersion": false, @@ -136004,7 +136462,7 @@ ], "response": { "type": { - "$ref": "10008" + "$ref": "10030" } }, "isOverride": false, @@ -136013,7 +136471,7 @@ "crossLanguageDefinitionId": "OpenAI.RetrieveVideoContent" }, { - "$id": "10015", + "$id": "10037", "kind": "basic", "name": "CreateVideoRemix", "accessibility": "public", @@ -136021,7 +136479,7 @@ "doc": "Create a video remix", "summary": "Remix video", "operation": { - "$id": "10016", + "$id": "10038", "name": "CreateVideoRemix", "resourceName": "OpenAI", "summary": "Remix video", @@ -136029,13 +136487,13 @@ "accessibility": "public", "parameters": [ { - "$id": "10017", + "$id": "10039", "kind": "path", "name": "video_id", "serializedName": "video_id", "doc": "The identifier of the completed video to remix.", "type": { - "$id": "10018", + "$id": "10040", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -136053,7 +136511,7 @@ "crossLanguageDefinitionId": "OpenAI.CreateVideoRemix.video_id" }, { - "$id": "10019", + "$id": "10041", "kind": "header", "name": "contentType", "serializedName": "Content-Type", @@ -136069,7 +136527,7 @@ "crossLanguageDefinitionId": "OpenAI.CreateVideoRemix.contentType" }, { - "$id": "10020", + "$id": "10042", "kind": "header", "name": "accept", "serializedName": "Accept", @@ -136085,12 +136543,12 @@ 
"crossLanguageDefinitionId": "OpenAI.CreateVideoRemix.accept" }, { - "$id": "10021", + "$id": "10043", "kind": "body", "name": "body", "serializedName": "body", "type": { - "$id": "10022", + "$id": "10044", "kind": "model", "name": "CreateVideoRemixBody", "namespace": "OpenAI", @@ -136101,18 +136559,18 @@ "decorators": [], "properties": [ { - "$id": "10023", + "$id": "10045", "kind": "property", "name": "prompt", "serializedName": "prompt", "doc": "Updated text prompt that directs the remix generation.", "type": { - "$id": "10024", + "$id": "10046", "kind": "string", "name": "VideoPrompt", "crossLanguageDefinitionId": "OpenAI.VideoPrompt", "baseType": { - "$id": "10025", + "$id": "10047", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -136159,7 +136617,7 @@ 200 ], "bodyType": { - "$ref": "9896" + "$ref": "9918" }, "headers": [], "isErrorResponse": false, @@ -136182,13 +136640,13 @@ }, "parameters": [ { - "$id": "10026", + "$id": "10048", "kind": "method", "name": "video_id", "serializedName": "video_id", "doc": "The identifier of the completed video to remix.", "type": { - "$id": "10027", + "$id": "10049", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string", @@ -136204,7 +136662,7 @@ "decorators": [] }, { - "$id": "10028", + "$id": "10050", "kind": "method", "name": "contentType", "serializedName": "Content-Type", @@ -136221,12 +136679,12 @@ "decorators": [] }, { - "$id": "10029", + "$id": "10051", "kind": "method", "name": "body", "serializedName": "body", "type": { - "$ref": "10022" + "$ref": "10044" }, "location": "Body", "isApiVersion": false, @@ -136238,7 +136696,7 @@ "decorators": [] }, { - "$id": "10030", + "$id": "10052", "kind": "method", "name": "accept", "serializedName": "Accept", @@ -136257,7 +136715,7 @@ ], "response": { "type": { - "$ref": "9896" + "$ref": "9918" } }, "isOverride": false, @@ -136268,13 +136726,13 @@ ], "parameters": [ { - "$id": "10031", + "$id": "10053", "kind": 
"endpoint", "name": "endpoint", "serializedName": "endpoint", "doc": "Service host", "type": { - "$id": "10032", + "$id": "10054", "kind": "url", "name": "endpoint", "crossLanguageDefinitionId": "TypeSpec.url" @@ -136285,7 +136743,7 @@ "isEndpoint": true, "defaultValue": { "type": { - "$id": "10033", + "$id": "10055", "kind": "string", "name": "string", "crossLanguageDefinitionId": "TypeSpec.string" @@ -136303,7 +136761,7 @@ "crossLanguageDefinitionId": "OpenAI", "apiVersions": [], "parent": { - "$ref": "8260" + "$ref": "8282" } } ] From d0382fbf24fa13c050fc5ea3944f5321a33ad113 Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Thu, 20 Nov 2025 12:10:48 -0600 Subject: [PATCH 14/15] fixups --- api/OpenAI.net8.0.cs | 60 ++-------- api/OpenAI.netstandard2.0.cs | 56 ++------- src/Custom/Responses/CreateResponseOptions.cs | 6 +- src/Custom/Responses/GetResponseOptions.cs | 6 + .../Responses/Includable.Serialization.cs | 42 ------- src/Custom/Responses/Includable.cs | 43 ------- .../Responses/ResponsesClient.Protocol.cs | 110 +++++++++--------- src/Custom/Responses/ResponsesClient.cs | 8 +- .../CreateResponseOptions.Serialization.cs | 10 +- .../Models/Responses/CreateResponseOptions.cs | 6 +- src/Generated/OpenAIModelFactory.cs | 4 +- tests/Responses/ResponsesTests.cs | 2 +- 12 files changed, 94 insertions(+), 259 deletions(-) delete mode 100644 src/Custom/Responses/Includable.Serialization.cs delete mode 100644 src/Custom/Responses/Includable.cs diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs index 174fea201..9bc8e2434 100644 --- a/api/OpenAI.net8.0.cs +++ b/api/OpenAI.net8.0.cs @@ -5095,6 +5095,7 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, [Experimental("OPENAI001")] public class CreateResponseOptions : IJsonModel, IPersistableModel { public CreateResponseOptions(IEnumerable inputItems, string model); + public CreateResponseOptions(IEnumerable inputItems); public IList IncludedProperties { get; } public IList 
InputItems { get; } public string Instructions { get; set; } @@ -5464,14 +5465,14 @@ public class ImageGenerationToolInputImageMask : IJsonModel { public IncludedResponseProperty(string value); public static IncludedResponseProperty CodeInterpreterCallOutputs { get; } @@ -5489,7 +5490,6 @@ public enum Includable { public static implicit operator IncludedResponseProperty?(string value); public static bool operator !=(IncludedResponseProperty left, IncludedResponseProperty right); public override readonly string ToString(); ->>>>>>> upstream/main } [Experimental("OPENAI001")] public class McpTool : ResponseTool, IJsonModel, IPersistableModel { @@ -5645,8 +5645,6 @@ public enum MessageStatus { Incomplete = 2 } [Experimental("OPENAI001")] -<<<<<<< HEAD -======= public class OpenAIResponse : IJsonModel, IPersistableModel { public bool? BackgroundModeEnabled { get; } public DateTimeOffset CreatedAt { get; } @@ -5683,50 +5681,6 @@ public class OpenAIResponse : IJsonModel, IPersistableModel CancelResponse(string responseId, CancellationToken cancellationToken = default); - public virtual Task CancelResponseAsync(string responseId, RequestOptions options); - public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); - public virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); - public virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual Task> CreateResponseAsync(string 
userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual ClientResult DeleteResponse(string responseId, RequestOptions options); - public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); - public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); - public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); - public virtual ClientResult GetResponse(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); - public virtual ClientResult GetResponse(string responseId, IEnumerable include = null, CancellationToken cancellationToken = default); - public virtual Task GetResponseAsync(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? 
includeObfuscation, RequestOptions options); - public virtual Task> GetResponseAsync(string responseId, IEnumerable include = null, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseInputItems(string responseId, int? limit, string order, string after, string before, RequestOptions options); - public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); - public virtual CollectionResult GetResponseStreaming(string responseId, IEnumerable include = null, int? startingAfter = null, bool? includeObfuscation = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, IEnumerable include = null, int? startingAfter = null, bool? includeObfuscation = null, CancellationToken cancellationToken = default); - } - [Experimental("OPENAI001")] ->>>>>>> upstream/main public static class OpenAIResponsesModelFactory { public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null); public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, IEnumerable summaryParts = null); @@ -5820,8 +5774,6 @@ public enum ResponseContentPartKind { Refusal = 5 } [Experimental("OPENAI001")] -<<<<<<< HEAD -======= public class ResponseCreationOptions : IJsonModel, IPersistableModel { public bool? 
BackgroundModeEnabled { get; set; } public string EndUserId { get; set; } @@ -5850,7 +5802,6 @@ public class ResponseCreationOptions : IJsonModel, IPer protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } [Experimental("OPENAI001")] ->>>>>>> upstream/main public class ResponseDeletionResult : IJsonModel, IPersistableModel { public bool Deleted { get; } public string Id { get; } @@ -6193,8 +6144,11 @@ public class ResponsesClient { public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponse(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); + public virtual ClientResult GetResponse(string responseId, IEnumerable include = null, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual Task GetResponseAsync(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); public virtual Task GetResponseAsync(string responseId, bool? stream, int? 
startingAfter, RequestOptions options); public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); @@ -6203,7 +6157,9 @@ public class ResponsesClient { public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual CollectionResult GetResponseStreaming(string responseId, IEnumerable include = null, int? startingAfter = null, bool? includeObfuscation = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, IEnumerable include = null, int? startingAfter = null, bool? 
includeObfuscation = null, CancellationToken cancellationToken = default); } [Experimental("OPENAI001")] public readonly partial struct ResponseServiceTier : IEquatable { diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs index a98275855..6e07e38fa 100644 --- a/api/OpenAI.netstandard2.0.cs +++ b/api/OpenAI.netstandard2.0.cs @@ -4456,6 +4456,7 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, } public class CreateResponseOptions : IJsonModel, IPersistableModel { public CreateResponseOptions(IEnumerable inputItems, string model); + public CreateResponseOptions(IEnumerable inputItems); public IList IncludedProperties { get; } public IList InputItems { get; } public string Instructions { get; set; } @@ -4794,14 +4795,13 @@ public class ImageGenerationToolInputImageMask : IJsonModel { public IncludedResponseProperty(string value); public static IncludedResponseProperty CodeInterpreterCallOutputs { get; } @@ -4819,7 +4819,6 @@ public enum Includable { public static implicit operator IncludedResponseProperty?(string value); public static bool operator !=(IncludedResponseProperty left, IncludedResponseProperty right); public override readonly string ToString(); ->>>>>>> upstream/main } public class McpTool : ResponseTool, IJsonModel, IPersistableModel { public McpTool(string serverLabel, McpToolConnectorId connectorId); @@ -4959,8 +4958,6 @@ public enum MessageStatus { Completed = 1, Incomplete = 2 } -<<<<<<< HEAD -======= public class OpenAIResponse : IJsonModel, IPersistableModel { public bool? 
BackgroundModeEnabled { get; } public DateTimeOffset CreatedAt { get; } @@ -4995,47 +4992,6 @@ public class OpenAIResponse : IJsonModel, IPersistableModel CancelResponse(string responseId, CancellationToken cancellationToken = default); - public virtual Task CancelResponseAsync(string responseId, RequestOptions options); - public virtual Task> CancelResponseAsync(string responseId, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(BinaryContent content, RequestOptions options = null); - public virtual ClientResult CreateResponse(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual ClientResult CreateResponse(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual Task CreateResponseAsync(BinaryContent content, RequestOptions options = null); - public virtual Task> CreateResponseAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual Task> CreateResponseAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult CreateResponseStreaming(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult CreateResponseStreaming(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(IEnumerable inputItems, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult CreateResponseStreamingAsync(string userInputText, ResponseCreationOptions options = null, CancellationToken cancellationToken = default); - public virtual ClientResult DeleteResponse(string 
responseId, RequestOptions options); - public virtual ClientResult DeleteResponse(string responseId, CancellationToken cancellationToken = default); - public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); - public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); - public virtual ClientResult GetResponse(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); - public virtual ClientResult GetResponse(string responseId, IEnumerable include = null, CancellationToken cancellationToken = default); - public virtual Task GetResponseAsync(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); - public virtual Task> GetResponseAsync(string responseId, IEnumerable include = null, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); - public virtual CollectionResult GetResponseInputItems(string responseId, int? limit, string order, string after, string before, RequestOptions options); - public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); - public virtual CollectionResult GetResponseStreaming(string responseId, IEnumerable include = null, int? startingAfter = null, bool? includeObfuscation = null, CancellationToken cancellationToken = default); - public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, IEnumerable include = null, int? startingAfter = null, bool? 
includeObfuscation = null, CancellationToken cancellationToken = default); - } ->>>>>>> upstream/main public static class OpenAIResponsesModelFactory { public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null); public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, IEnumerable summaryParts = null); @@ -5119,8 +5075,6 @@ public enum ResponseContentPartKind { OutputText = 4, Refusal = 5 } -<<<<<<< HEAD -======= public class ResponseCreationOptions : IJsonModel, IPersistableModel { public bool? BackgroundModeEnabled { get; set; } public string EndUserId { get; set; } @@ -5147,7 +5101,6 @@ public class ResponseCreationOptions : IJsonModel, IPer protected virtual ResponseCreationOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options); protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options); } ->>>>>>> upstream/main public class ResponseDeletionResult : IJsonModel, IPersistableModel { public bool Deleted { get; } public string Id { get; } @@ -5458,8 +5411,11 @@ public class ResponsesClient { public virtual Task DeleteResponseAsync(string responseId, RequestOptions options); public virtual Task> DeleteResponseAsync(string responseId, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual ClientResult GetResponse(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); + public virtual ClientResult GetResponse(string responseId, IEnumerable include = null, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(string responseId, bool? stream, int? 
startingAfter, RequestOptions options); public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual Task GetResponseAsync(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); public virtual Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options); public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); @@ -5468,7 +5424,9 @@ public class ResponsesClient { public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseInputItemsAsync(string responseId, int? limit, string order, string after, string before, RequestOptions options); public virtual CollectionResult GetResponseStreaming(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual CollectionResult GetResponseStreaming(string responseId, IEnumerable include = null, int? startingAfter = null, bool? includeObfuscation = null, CancellationToken cancellationToken = default); public virtual AsyncCollectionResult GetResponseStreamingAsync(GetResponseOptions options, CancellationToken cancellationToken = default); + public virtual AsyncCollectionResult GetResponseStreamingAsync(string responseId, IEnumerable include = null, int? startingAfter = null, bool? 
includeObfuscation = null, CancellationToken cancellationToken = default); } public readonly partial struct ResponseServiceTier : IEquatable { public ResponseServiceTier(string value); diff --git a/src/Custom/Responses/CreateResponseOptions.cs b/src/Custom/Responses/CreateResponseOptions.cs index 2284293d8..26560c88d 100644 --- a/src/Custom/Responses/CreateResponseOptions.cs +++ b/src/Custom/Responses/CreateResponseOptions.cs @@ -16,7 +16,7 @@ public CreateResponseOptions(IEnumerable inputItems, string model) Metadata = new ChangeTrackingDictionary(); Tools = new ChangeTrackingList(); InputItems = inputItems.ToList(); - IncludedProperties = new ChangeTrackingList(); + IncludedProperties = new ChangeTrackingList(); Model = model; } @@ -42,7 +42,7 @@ public CreateResponseOptions(IEnumerable inputItems, string model) /// Gets or sets the list of fields to include in the response. This corresponds to the "include" property in the JSON representation. /// [CodeGenMember("Include")] - public IList IncludedProperties { get; } + public IList IncludedProperties { get; } /// /// Gets or sets whether multiple tool calls can be made in parallel. This corresponds to the "parallel_tool_calls" property in the JSON representation. @@ -115,7 +115,7 @@ internal static CreateResponseOptions Create(IEnumerable inputItem responseCreationOptions.ToolChoice, responseCreationOptions.TruncationMode, [.. inputItems], - [.. responseCreationOptions.IncludedProperties.Select(x => x.ToIncludable())], + [.. 
responseCreationOptions.IncludedProperties], responseCreationOptions.ParallelToolCallsEnabled, responseCreationOptions.StoredOutputEnabled, responseCreationOptions.Stream, diff --git a/src/Custom/Responses/GetResponseOptions.cs b/src/Custom/Responses/GetResponseOptions.cs index 63bec2623..6bcff7c9a 100644 --- a/src/Custom/Responses/GetResponseOptions.cs +++ b/src/Custom/Responses/GetResponseOptions.cs @@ -1,4 +1,6 @@ +using System.Collections.Generic; + namespace OpenAI.Responses { public class GetResponseOptions @@ -13,5 +15,9 @@ public GetResponseOptions(string responseId) public int? StartingAfter { get; set; } public bool Stream { get; set; } + + public IEnumerable IncludedProperties { get; set;} + + public bool? IncludeObfuscation { get; set; } } } \ No newline at end of file diff --git a/src/Custom/Responses/Includable.Serialization.cs b/src/Custom/Responses/Includable.Serialization.cs deleted file mode 100644 index 3edc8f6aa..000000000 --- a/src/Custom/Responses/Includable.Serialization.cs +++ /dev/null @@ -1,42 +0,0 @@ -using System; - -namespace OpenAI.Responses -{ - internal static partial class IncludableExtensions - { - public static string ToSerialString(this Includable value) => value switch - { - Includable.FileSearchCallResults => "file_search_call.results", - Includable.MessageInputImageImageUrl => "message.input_image.image_url", - Includable.ComputerCallOutputOutputImageUrl => "computer_call_output.output.image_url", - Includable.ReasoningEncryptedContent => "reasoning.encrypted_content", - Includable.CodeInterpreterCallOutputs => "code_interpreter_call.outputs", - _ => throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown Includable value.") - }; - - public static Includable ToIncludable(this string value) - { - if (StringComparer.OrdinalIgnoreCase.Equals(value, "file_search_call.results")) - { - return Includable.FileSearchCallResults; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "message.input_image.image_url")) - { 
- return Includable.MessageInputImageImageUrl; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "computer_call_output.output.image_url")) - { - return Includable.ComputerCallOutputOutputImageUrl; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "reasoning.encrypted_content")) - { - return Includable.ReasoningEncryptedContent; - } - if (StringComparer.OrdinalIgnoreCase.Equals(value, "code_interpreter_call.outputs")) - { - return Includable.CodeInterpreterCallOutputs; - } - throw new ArgumentOutOfRangeException(nameof(value), value, "Unknown Includable value."); - } - } -} diff --git a/src/Custom/Responses/Includable.cs b/src/Custom/Responses/Includable.cs deleted file mode 100644 index ea9d971e3..000000000 --- a/src/Custom/Responses/Includable.cs +++ /dev/null @@ -1,43 +0,0 @@ -using System; -using System.Diagnostics.CodeAnalysis; - -namespace OpenAI.Responses -{ - [Experimental("OPENAI001")] - public enum Includable - { - FileSearchCallResults, - MessageInputImageImageUrl, - ComputerCallOutputOutputImageUrl, - ReasoningEncryptedContent, - CodeInterpreterCallOutputs - } - - internal static partial class IncludableExtensions - { - internal static Includable ToIncludable(this IncludedResponseProperty includable) - { - if (includable == IncludedResponseProperty.FileSearchCallResults) - { - return Includable.FileSearchCallResults; - } - if (includable == IncludedResponseProperty.MessageInputImageUri) - { - return Includable.MessageInputImageImageUrl; - } - if (includable == IncludedResponseProperty.ComputerCallOutputImageUri) - { - return Includable.ComputerCallOutputOutputImageUrl; - } - if (includable == IncludedResponseProperty.ReasoningEncryptedContent) - { - return Includable.ReasoningEncryptedContent; - } - if (includable == IncludedResponseProperty.CodeInterpreterCallOutputs) - { - return Includable.CodeInterpreterCallOutputs; - } - throw new ArgumentException($"Unknown Includable value: {includable}", nameof(includable)); - } - } -} diff --git 
a/src/Custom/Responses/ResponsesClient.Protocol.cs b/src/Custom/Responses/ResponsesClient.Protocol.cs index fbdbb580c..7adb8c14e 100644 --- a/src/Custom/Responses/ResponsesClient.Protocol.cs +++ b/src/Custom/Responses/ResponsesClient.Protocol.cs @@ -1,62 +1,62 @@ -using System.ClientModel; -using System.ClientModel.Primitives; -using System.Collections.Generic; -using System.Threading.Tasks; +// using System.ClientModel; +// using System.ClientModel.Primitives; +// using System.Collections.Generic; +// using System.Threading.Tasks; -namespace OpenAI.Responses; +// namespace OpenAI.Responses; -[CodeGenSuppress("GetResponse", typeof(string), typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] -[CodeGenSuppress("GetResponseAsync", typeof(string), typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] -[CodeGenSuppress("CancelResponse", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] -[CodeGenSuppress("CancelResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] -[CodeGenSuppress("GetResponse", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] -[CodeGenSuppress("GetResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] -public partial class ResponsesClient -{ - public virtual async Task GetResponseAsync(string responseId, bool? stream, int? 
startingAfter, RequestOptions options) - { - Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); +// [CodeGenSuppress("GetResponse", typeof(string), typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] +// [CodeGenSuppress("GetResponseAsync", typeof(string), typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] +// [CodeGenSuppress("CancelResponse", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] +// [CodeGenSuppress("CancelResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] +// [CodeGenSuppress("GetResponse", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] +// [CodeGenSuppress("GetResponseAsync", typeof(string), typeof(IEnumerable), typeof(bool?), typeof(int?), typeof(RequestOptions))] +// public partial class ResponsesClient +// { +// public virtual async Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options) +// { +// Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - using PipelineMessage message = CreateGetResponseRequest(responseId, [], stream, startingAfter, options); +// using PipelineMessage message = CreateGetResponseRequest(responseId, [], stream, startingAfter, options); - PipelineResponse protocolResponse = await Pipeline.ProcessMessageAsync(message, options).ConfigureAwait(false); - return ClientResult.FromResponse(protocolResponse); - } +// PipelineResponse protocolResponse = await Pipeline.ProcessMessageAsync(message, options).ConfigureAwait(false); +// return ClientResult.FromResponse(protocolResponse); +// } - public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options) - { - Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); +// public virtual ClientResult GetResponse(string responseId, bool? 
stream, int? startingAfter, RequestOptions options) +// { +// Argument.AssertNotNullOrEmpty(responseId, nameof(responseId)); - using PipelineMessage message = CreateGetResponseRequest(responseId, [], stream, startingAfter, options); - PipelineResponse protocolResponse = Pipeline.ProcessMessage(message, options); - return ClientResult.FromResponse(protocolResponse); - } +// using PipelineMessage message = CreateGetResponseRequest(responseId, [], stream, startingAfter, options); +// PipelineResponse protocolResponse = Pipeline.ProcessMessage(message, options); +// return ClientResult.FromResponse(protocolResponse); +// } - internal virtual PipelineMessage CreateGetResponseRequest(string responseId, IEnumerable includables, bool? stream, int? startingAfter, RequestOptions options) - { - ClientUriBuilder uri = new ClientUriBuilder(); - uri.Reset(_endpoint); - uri.AppendPath("/responses/", false); - uri.AppendPath(responseId, true); - if (includables != null && !(includables is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) - { - foreach (var @param in includables) - { - uri.AppendQuery("include[]", @param.ToSerialString(), true); - } - } - if (stream != null) - { - uri.AppendQuery("stream", TypeFormatters.ConvertToString(stream), true); - } - if (startingAfter != null) - { - uri.AppendQuery("starting_after", TypeFormatters.ConvertToString(startingAfter), true); - } - PipelineMessage message = Pipeline.CreateMessage(uri.ToUri(), "GET", PipelineMessageClassifier200); - PipelineRequest request = message.Request; - request.Headers.Set("Accept", "application/json, text/event-stream"); - message.Apply(options); - return message; - } -} \ No newline at end of file +// internal virtual PipelineMessage CreateGetResponseRequest(string responseId, IEnumerable includables, bool? stream, int? 
startingAfter, RequestOptions options) +// { +// ClientUriBuilder uri = new ClientUriBuilder(); +// uri.Reset(_endpoint); +// uri.AppendPath("/responses/", false); +// uri.AppendPath(responseId, true); +// if (include != null && !(include is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) +// { +// foreach (var @param in include) +// { +// uri.AppendQuery("include[]", @param.ToString(), true); +// } +// } +// if (stream != null) +// { +// uri.AppendQuery("stream", TypeFormatters.ConvertToString(stream), true); +// } +// if (startingAfter != null) +// { +// uri.AppendQuery("starting_after", TypeFormatters.ConvertToString(startingAfter), true); +// } +// PipelineMessage message = Pipeline.CreateMessage(uri.ToUri(), "GET", PipelineMessageClassifier200); +// PipelineRequest request = message.Request; +// request.Headers.Set("Accept", "application/json, text/event-stream"); +// message.Apply(options); +// return message; +// } +// } \ No newline at end of file diff --git a/src/Custom/Responses/ResponsesClient.cs b/src/Custom/Responses/ResponsesClient.cs index 051cb26d7..defb0337a 100644 --- a/src/Custom/Responses/ResponsesClient.cs +++ b/src/Custom/Responses/ResponsesClient.cs @@ -287,7 +287,7 @@ public virtual async Task> GetResponseAsync(GetResp Argument.AssertNotNull(options, nameof(options)); Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); - ClientResult protocolResult = await GetResponseAsync(options.ResponseId, stream: options.Stream, startingAfter: options.StartingAfter, cancellationToken.ToRequestOptions()).ConfigureAwait(false); + ClientResult protocolResult = await GetResponseAsync(options.ResponseId, options.IncludedProperties, stream: options.Stream, startingAfter: options.StartingAfter, includeObfuscation: options.IncludeObfuscation, cancellationToken.ToRequestOptions()).ConfigureAwait(false); return ClientResult.FromValue((ResponseResult)protocolResult, protocolResult.GetRawResponse()); } @@ -296,7 
+296,7 @@ public virtual ClientResult GetResponse(GetResponseOptions optio Argument.AssertNotNull(options, nameof(options)); Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); - ClientResult protocolResult = GetResponse(options.ResponseId, stream: options.Stream, startingAfter: options.StartingAfter, cancellationToken.ToRequestOptions()); + ClientResult protocolResult = GetResponse(options.ResponseId, options.IncludedProperties, stream: options.Stream, startingAfter: options.StartingAfter, includeObfuscation: options.IncludeObfuscation, cancellationToken.ToRequestOptions()); return ClientResult.FromValue((ResponseResult)protocolResult, protocolResult.GetRawResponse()); } @@ -326,7 +326,7 @@ public virtual CollectionResult GetResponseStreaming(Ge Argument.AssertNotNullOrEmpty(options.ResponseId, nameof(options.ResponseId)); return new SseUpdateCollection( - () => GetResponse(options.ResponseId, stream: true, startingAfter: options.StartingAfter, cancellationToken.ToRequestOptions(streaming: true)), + () => GetResponse(options.ResponseId, options.IncludedProperties, stream: true, startingAfter: options.StartingAfter, includeObfuscation: options.IncludeObfuscation, cancellationToken.ToRequestOptions(streaming: true)), StreamingResponseUpdate.DeserializeStreamingResponseUpdate, cancellationToken); } @@ -342,7 +342,7 @@ public virtual AsyncCollectionResult GetResponseStreami } return new AsyncSseUpdateCollection( - async () => await GetResponseAsync(options.ResponseId, options.Stream, startingAfter: options.StartingAfter, cancellationToken.ToRequestOptions()).ConfigureAwait(false), + async () => await GetResponseAsync(options.ResponseId, options.IncludedProperties, options.Stream, startingAfter: options.StartingAfter, includeObfuscation: options.IncludeObfuscation, cancellationToken.ToRequestOptions()).ConfigureAwait(false), StreamingResponseUpdate.DeserializeStreamingResponseUpdate, cancellationToken); } diff --git 
a/src/Generated/Models/Responses/CreateResponseOptions.Serialization.cs b/src/Generated/Models/Responses/CreateResponseOptions.Serialization.cs index e83bc531e..9a9142324 100644 --- a/src/Generated/Models/Responses/CreateResponseOptions.Serialization.cs +++ b/src/Generated/Models/Responses/CreateResponseOptions.Serialization.cs @@ -199,7 +199,7 @@ protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWrit { continue; } - writer.WriteStringValue(IncludedProperties[i].ToSerialString()); + writer.WriteStringValue(IncludedProperties[i].ToString()); } Patch.WriteTo(writer, "$.include"u8); writer.WriteEndArray(); @@ -259,7 +259,7 @@ internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonEleme ResponseToolChoice toolChoice = default; ResponseTruncationMode? truncationMode = default; IList inputItems = default; - IList includedProperties = default; + IList includedProperties = default; bool? isParallelToolCallsEnabled = default; bool? isStoredOutputEnabled = default; bool? isStreamingEnabled = default; @@ -436,10 +436,10 @@ internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonEleme { continue; } - List array = new List(); + List array = new List(); foreach (var item in prop.Value.EnumerateArray()) { - array.Add(item.GetString().ToIncludable()); + array.Add(new IncludedResponseProperty(item.GetString())); } includedProperties = array; continue; @@ -493,7 +493,7 @@ internal static CreateResponseOptions DeserializeCreateResponseOptions(JsonEleme toolChoice, truncationMode, inputItems, - includedProperties ?? new ChangeTrackingList(), + includedProperties ?? 
new ChangeTrackingList(), isParallelToolCallsEnabled, isStoredOutputEnabled, isStreamingEnabled, diff --git a/src/Generated/Models/Responses/CreateResponseOptions.cs b/src/Generated/Models/Responses/CreateResponseOptions.cs index 8a85004b0..0dd7eacfd 100644 --- a/src/Generated/Models/Responses/CreateResponseOptions.cs +++ b/src/Generated/Models/Responses/CreateResponseOptions.cs @@ -25,11 +25,11 @@ public CreateResponseOptions(IEnumerable inputItems) Metadata = new ChangeTrackingDictionary(); Tools = new ChangeTrackingList(); InputItems = inputItems.ToList(); - IncludedProperties = new ChangeTrackingList(); + IncludedProperties = new ChangeTrackingList(); } #pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. - internal CreateResponseOptions(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, string model, ResponseReasoningOptions reasoningOptions, bool? isBackgroundModeEnabled, int? maxOutputTokenCount, string instructions, ResponseTextOptions textOptions, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncationMode, IList inputItems, IList includedProperties, bool? isParallelToolCallsEnabled, bool? isStoredOutputEnabled, bool? isStreamingEnabled, in JsonPatch patch) + internal CreateResponseOptions(IDictionary metadata, float? temperature, float? topP, string user, ResponseServiceTier? serviceTier, string previousResponseId, string model, ResponseReasoningOptions reasoningOptions, bool? isBackgroundModeEnabled, int? maxOutputTokenCount, string instructions, ResponseTextOptions textOptions, IList tools, ResponseToolChoice toolChoice, ResponseTruncationMode? truncationMode, IList inputItems, IList includedProperties, bool? isParallelToolCallsEnabled, bool? isStoredOutputEnabled, bool? 
isStreamingEnabled, in JsonPatch patch) { // Plugin customization: ensure initialization of collections Metadata = metadata ?? new ChangeTrackingDictionary(); @@ -48,7 +48,7 @@ internal CreateResponseOptions(IDictionary metadata, float? temp ToolChoice = toolChoice; TruncationMode = truncationMode; InputItems = inputItems ?? new ChangeTrackingList(); - IncludedProperties = includedProperties ?? new ChangeTrackingList(); + IncludedProperties = includedProperties ?? new ChangeTrackingList(); IsParallelToolCallsEnabled = isParallelToolCallsEnabled; IsStoredOutputEnabled = isStoredOutputEnabled; IsStreamingEnabled = isStreamingEnabled; diff --git a/src/Generated/OpenAIModelFactory.cs b/src/Generated/OpenAIModelFactory.cs index f6fbaa68c..2dc22a1ae 100644 --- a/src/Generated/OpenAIModelFactory.cs +++ b/src/Generated/OpenAIModelFactory.cs @@ -1397,12 +1397,12 @@ public static CodeInterpreterToolContainer CodeInterpreterToolContainer(string c return new CodeInterpreterToolContainer(containerId, containerConfiguration, default); } - public static CreateResponseOptions CreateResponseOptions(IDictionary metadata = default, float? temperature = default, float? topP = default, string user = default, ResponseServiceTier? serviceTier = default, string previousResponseId = default, string model = default, ResponseReasoningOptions reasoningOptions = default, bool? isBackgroundModeEnabled = default, int? maxOutputTokenCount = default, string instructions = default, ResponseTextOptions textOptions = default, IEnumerable tools = default, ResponseToolChoice toolChoice = default, ResponseTruncationMode? truncationMode = default, IEnumerable inputItems = default, IEnumerable includedProperties = default, bool? isParallelToolCallsEnabled = default, bool? isStoredOutputEnabled = default, bool? isStreamingEnabled = default) + public static CreateResponseOptions CreateResponseOptions(IDictionary metadata = default, float? temperature = default, float? 
topP = default, string user = default, ResponseServiceTier? serviceTier = default, string previousResponseId = default, string model = default, ResponseReasoningOptions reasoningOptions = default, bool? isBackgroundModeEnabled = default, int? maxOutputTokenCount = default, string instructions = default, ResponseTextOptions textOptions = default, IEnumerable tools = default, ResponseToolChoice toolChoice = default, ResponseTruncationMode? truncationMode = default, IEnumerable inputItems = default, IEnumerable includedProperties = default, bool? isParallelToolCallsEnabled = default, bool? isStoredOutputEnabled = default, bool? isStreamingEnabled = default) { metadata ??= new ChangeTrackingDictionary(); tools ??= new ChangeTrackingList(); inputItems ??= new ChangeTrackingList(); - includedProperties ??= new ChangeTrackingList(); + includedProperties ??= new ChangeTrackingList(); return new CreateResponseOptions( metadata, diff --git a/tests/Responses/ResponsesTests.cs b/tests/Responses/ResponsesTests.cs index f50f5670b..bf11c0d27 100644 --- a/tests/Responses/ResponsesTests.cs +++ b/tests/Responses/ResponsesTests.cs @@ -685,7 +685,7 @@ public async Task ReasoningWithStoreDisabled() CreateResponseOptions options = new(inputItems, "gpt-5-mini") { IsStoredOutputEnabled = false, - IncludedProperties = { Includable.ReasoningEncryptedContent } + IncludedProperties = { IncludedResponseProperty.ReasoningEncryptedContent } }; // First turn. 
From b17b1c1c641dd66d696f213ef4c4655304ba50da Mon Sep 17 00:00:00 2001 From: Christopher Scott Date: Thu, 20 Nov 2025 12:14:12 -0600 Subject: [PATCH 15/15] export --- api/OpenAI.net8.0.cs | 14 +++----------- api/OpenAI.netstandard2.0.cs | 13 +++---------- 2 files changed, 6 insertions(+), 21 deletions(-) diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs index 9bc8e2434..0a28aaf07 100644 --- a/api/OpenAI.net8.0.cs +++ b/api/OpenAI.net8.0.cs @@ -5096,7 +5096,7 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, public class CreateResponseOptions : IJsonModel, IPersistableModel { public CreateResponseOptions(IEnumerable inputItems, string model); public CreateResponseOptions(IEnumerable inputItems); - public IList IncludedProperties { get; } + public IList IncludedProperties { get; } public IList InputItems { get; } public string Instructions { get; set; } public bool? IsBackgroundModeEnabled { get; set; } @@ -5293,6 +5293,8 @@ public partial struct GetResponseInputItemsOptions { } public class GetResponseOptions { public GetResponseOptions(string responseId); + public IEnumerable IncludedProperties { get; set; } + public bool? IncludeObfuscation { get; set; } public string ResponseId { get; set; } public int? StartingAfter { get; set; } public bool Stream { get; set; } @@ -5465,14 +5467,6 @@ public class ImageGenerationToolInputImageMask : IJsonModel { public IncludedResponseProperty(string value); public static IncludedResponseProperty CodeInterpreterCallOutputs { get; } @@ -6146,10 +6140,8 @@ public class ResponsesClient { public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? 
includeObfuscation, RequestOptions options); public virtual ClientResult GetResponse(string responseId, IEnumerable include = null, CancellationToken cancellationToken = default); - public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual Task GetResponseAsync(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); - public virtual Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options); public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, int? limit, string order, string after, string before, RequestOptions options); diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs index 6e07e38fa..48e470ce6 100644 --- a/api/OpenAI.netstandard2.0.cs +++ b/api/OpenAI.netstandard2.0.cs @@ -4457,7 +4457,7 @@ public class ContainerFileCitationMessageAnnotation : ResponseMessageAnnotation, public class CreateResponseOptions : IJsonModel, IPersistableModel { public CreateResponseOptions(IEnumerable inputItems, string model); public CreateResponseOptions(IEnumerable inputItems); - public IList IncludedProperties { get; } + public IList IncludedProperties { get; } public IList InputItems { get; } public string Instructions { get; set; } public bool? 
IsBackgroundModeEnabled { get; set; } @@ -4636,6 +4636,8 @@ public partial struct GetResponseInputItemsOptions { } public class GetResponseOptions { public GetResponseOptions(string responseId); + public IEnumerable IncludedProperties { get; set; } + public bool? IncludeObfuscation { get; set; } public string ResponseId { get; set; } public int? StartingAfter { get; set; } public bool Stream { get; set; } @@ -4795,13 +4797,6 @@ public class ImageGenerationToolInputImageMask : IJsonModel { public IncludedResponseProperty(string value); public static IncludedResponseProperty CodeInterpreterCallOutputs { get; } @@ -5413,10 +5408,8 @@ public class ResponsesClient { public virtual ClientResult GetResponse(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual ClientResult GetResponse(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); public virtual ClientResult GetResponse(string responseId, IEnumerable include = null, CancellationToken cancellationToken = default); - public virtual ClientResult GetResponse(string responseId, bool? stream, int? startingAfter, RequestOptions options); public virtual Task> GetResponseAsync(GetResponseOptions options, CancellationToken cancellationToken = default); public virtual Task GetResponseAsync(string responseId, IEnumerable include, bool? stream, int? startingAfter, bool? includeObfuscation, RequestOptions options); - public virtual Task GetResponseAsync(string responseId, bool? stream, int? startingAfter, RequestOptions options); public virtual ClientResult GetResponseInputItems(GetResponseInputItemsOptions options = default, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, ResponseItemCollectionOptions options = null, CancellationToken cancellationToken = default); public virtual CollectionResult GetResponseInputItems(string responseId, int? 
limit, string order, string after, string before, RequestOptions options);