diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI..JsonSerializerContext.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI..JsonSerializerContext.g.cs
index 2f40164a..eecaeb8d 100644
--- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI..JsonSerializerContext.g.cs
+++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI..JsonSerializerContext.g.cs
@@ -1331,22 +1331,26 @@ namespace tryAGI.OpenAI
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeConversationItemWithReferenceTypeNullableJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeCreateClientSecretRequestExpiresAfterAnchorJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeCreateClientSecretRequestExpiresAfterAnchorNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseIncludeItemJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseIncludeItemNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseMaxOutputTokensJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseMaxOutputTokensNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseModelJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseModelNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseOutputModalitieJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseOutputModalitieNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTracingEnumJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTracingEnumNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTypeJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTypeNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAIncludeItemJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAIncludeItemNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAMaxOutputTokensJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAMaxOutputTokensNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAModelJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAModelNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAOutputModalitieJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAOutputModalitieNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATracingEnumJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATracingEnumNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATypeJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATypeNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAIncludeItemJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAIncludeItemNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGATypeJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGATypeNullableJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeMCPApprovalRequestTypeJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeMCPApprovalRequestTypeNullableJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeMCPApprovalResponseTypeJsonConverter),
@@ -1525,10 +1529,16 @@ namespace tryAGI.OpenAI
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeServerEventSessionCreatedTypeNullableJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeServerEventSessionUpdatedTypeJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeServerEventSessionUpdatedTypeNullableJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedTypeJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedTypeNullableJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionUpdatedTypeJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionUpdatedTypeNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseIncludeItemJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseIncludeItemNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseMaxOutputTokensJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseMaxOutputTokensNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseOutputModalitieJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseOutputModalitieNullableJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTracingEnumJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTracingEnumNullableJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.ResponseErrorCodeJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.ResponseErrorCodeNullableJsonConverter),
             typeof(global::tryAGI.OpenAI.JsonConverters.ResponseVariant3IncompleteDetailsReasonJsonConverter),
@@ -2086,14 +2096,16 @@ namespace tryAGI.OpenAI
             typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
-            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
+            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter>),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
            typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter),
diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeClientEvent.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeClientEvent.g.cs
index de57ba42..6e1105f4 100644
--- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeClientEvent.g.cs
+++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeClientEvent.g.cs
@@ -148,18 +148,6 @@ public class RealtimeClientEventJsonConverter : global::System.Text.Json.Seriali
            {
            }

-            readerCopy = reader;
-            global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate? transcriptionSessionUpdate = default;
-            try
-            {
-                var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate> ??
-                               throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate).Name}");
-                transcriptionSessionUpdate = global::System.Text.Json.JsonSerializer.Deserialize(ref readerCopy, typeInfo);
-            }
-            catch (global::System.Text.Json.JsonException)
-            {
-            }
-
            var result = new global::tryAGI.OpenAI.RealtimeClientEvent(
                conversationItemCreate,
                conversationItemDelete,
@@ -171,8 +159,7 @@ public class RealtimeClientEventJsonConverter : global::System.Text.Json.Seriali
                inputAudioBufferCommit,
                responseCancel,
                responseCreate,
-                sessionUpdate,
-                transcriptionSessionUpdate
+                sessionUpdate
                );

            if (conversationItemCreate != null)
@@ -241,12 +228,6 @@ public class RealtimeClientEventJsonConverter : global::System.Text.Json.Seriali
                    throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeClientEventSessionUpdate).Name}");
                _ = global::System.Text.Json.JsonSerializer.Deserialize(ref reader, typeInfo);
            }
-            else if (transcriptionSessionUpdate != null)
-            {
-                var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate> ??
-                    throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate).Name}");
-                _ = global::System.Text.Json.JsonSerializer.Deserialize(ref reader, typeInfo);
-            }

            return result;
        }
@@ -326,12 +307,6 @@ public override void Write(
                    throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeClientEventSessionUpdate).Name}");
                global::System.Text.Json.JsonSerializer.Serialize(writer, value.SessionUpdate, typeInfo);
            }
-            else if (value.IsTranscriptionSessionUpdate)
-            {
-                var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate> ??
-                    throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate).Name}");
-                global::System.Text.Json.JsonSerializer.Serialize(writer, value.TranscriptionSessionUpdate, typeInfo);
-            }
        }
    }
}
\ No newline at end of file
diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.g.cs
new file mode 100644
index 00000000..1fdbd4b0
--- /dev/null
+++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.g.cs
@@ -0,0 +1,53 @@
+#nullable enable
+
+namespace tryAGI.OpenAI.JsonConverters
+{
+    /// <inheritdoc />
+    public sealed class RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessJsonConverter : global::System.Text.Json.Serialization.JsonConverter<global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness>
+    {
+        /// <inheritdoc />
+        public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness Read(
+            ref global::System.Text.Json.Utf8JsonReader reader,
+            global::System.Type typeToConvert,
+            global::System.Text.Json.JsonSerializerOptions options)
+        {
+            switch (reader.TokenType)
+            {
+                case global::System.Text.Json.JsonTokenType.String:
+                {
+                    var stringValue = reader.GetString();
+                    if (stringValue != null)
+                    {
+                        return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessExtensions.ToEnum(stringValue) ?? default;
+                    }
+
+                    break;
+                }
+                case global::System.Text.Json.JsonTokenType.Number:
+                {
+                    var numValue = reader.GetInt32();
+                    return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness)numValue;
+                }
+                case global::System.Text.Json.JsonTokenType.Null:
+                {
+                    return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness);
+                }
+                default:
+                    throw new global::System.ArgumentOutOfRangeException(nameof(reader));
+            }
+
+            return default;
+        }
+
+        /// <inheritdoc />
+        public override void Write(
+            global::System.Text.Json.Utf8JsonWriter writer,
+            global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness value,
+            global::System.Text.Json.JsonSerializerOptions options)
+        {
+            writer = writer ??
throw new global::System.ArgumentNullException(nameof(writer)); + + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessExtensions.ToValueString(value)); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessNullable.g.cs new file mode 100644 index 00000000..364c36c0 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessNullable.g.cs @@ -0,0 +1,60 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness? Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessExtensions.ToEnum(stringValue); + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness?); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness? value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? 
throw new global::System.ArgumentNullException(nameof(writer)); + + if (value == null) + { + writer.WriteNullValue(); + } + else + { + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessExtensions.ToValueString(value.Value)); + } + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.g.cs similarity index 71% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.g.cs index b0d6e15e..cb4345b1 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness Read( + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernes var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessExtensions.ToEnum(stringValue) ?? default; + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeExtensions.ToEnum(stringValue) ?? default; } break; @@ -26,11 +26,11 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernes case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness)numValue; + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness); + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,12 +42,12 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernes /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness value, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? 
throw new global::System.ArgumentNullException(nameof(writer)); - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessExtensions.ToValueString(value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeExtensions.ToValueString(value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeNullable.g.cs similarity index 72% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessNullable.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeNullable.g.cs index 4daec218..a6fd31ae 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessNullable.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeNullable.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness? Read( + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType? Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernes var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessExtensions.ToEnum(stringValue); + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeExtensions.ToEnum(stringValue); } break; @@ -26,11 +26,11 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernes case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness)numValue; + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness?); + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType?); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,7 +42,7 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernes /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness? value, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType? 
value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); @@ -53,7 +53,7 @@ public override void Write( } else { - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessExtensions.ToValueString(value.Value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeExtensions.ToValueString(value.Value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionType.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAIncludeItem.g.cs similarity index 72% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionType.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAIncludeItem.g.cs index 96ad0308..08602d26 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionType.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAIncludeItem.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeSessionCreateResponseGAIncludeItemJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType Read( + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeJson var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeExtensions.ToEnum(stringValue) ?? default; + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItemExtensions.ToEnum(stringValue) ?? default; } break; @@ -26,11 +26,11 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeJson case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType)numValue; + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType); + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,12 +42,12 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeJson /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType value, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? 
throw new global::System.ArgumentNullException(nameof(writer)); - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeExtensions.ToValueString(value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItemExtensions.ToValueString(value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAIncludeItemNullable.g.cs similarity index 73% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeNullable.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAIncludeItemNullable.g.cs index 552a3f4a..587c1b8a 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeNullable.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAIncludeItemNullable.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeSessionCreateResponseGAIncludeItemNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType? Read( + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem? Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeNull var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeExtensions.ToEnum(stringValue); + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItemExtensions.ToEnum(stringValue); } break; @@ -26,11 +26,11 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeNull case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType)numValue; + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType?); + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem?); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,7 +42,7 @@ public sealed class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeNull /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType? value, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem? value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? 
throw new global::System.ArgumentNullException(nameof(writer)); @@ -53,7 +53,7 @@ public override void Write( } else { - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeExtensions.ToValueString(value.Value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItemExtensions.ToValueString(value.Value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAMaxOutputTokens.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAMaxOutputTokens.g.cs new file mode 100644 index 00000000..aea6a052 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAMaxOutputTokens.g.cs @@ -0,0 +1,53 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeSessionCreateResponseGAMaxOutputTokensJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokensExtensions.ToEnum(stringValue) ?? default; + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); + + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokensExtensions.ToValueString(value)); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAMaxOutputTokensNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAMaxOutputTokensNullable.g.cs new file mode 100644 index 00000000..51a5f7c3 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAMaxOutputTokensNullable.g.cs @@ -0,0 +1,60 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeSessionCreateResponseGAMaxOutputTokensNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens? 
Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokensExtensions.ToEnum(stringValue); + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens?); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens? value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); + + if (value == null) + { + writer.WriteNullValue(); + } + else + { + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokensExtensions.ToValueString(value.Value)); + } + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseModel.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAModel.g.cs similarity index 77% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseModel.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAModel.g.cs index a179dd81..01d825ae 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseModel.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAModel.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeSessionCreateResponseModelJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeSessionCreateResponseGAModelJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel Read( + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeSessionCreateResponseModelJsonConverter : global::Sy var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeSessionCreateResponseModelExtensions.ToEnum(stringValue) ?? default; + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModelExtensions.ToEnum(stringValue) ?? 
default; } break; @@ -26,11 +26,11 @@ public sealed class RealtimeSessionCreateResponseModelJsonConverter : global::Sy case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel)numValue; + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel); + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,12 +42,12 @@ public sealed class RealtimeSessionCreateResponseModelJsonConverter : global::Sy /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel value, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseModelExtensions.ToValueString(value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModelExtensions.ToValueString(value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTypeNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAModelNullable.g.cs similarity index 78% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTypeNullable.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAModelNullable.g.cs index a5408897..524e49cf 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTypeNullable.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAModelNullable.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeSessionCreateResponseTypeNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeSessionCreateResponseGAModelNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseType? Read( + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel? 
Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeSessionCreateResponseTypeNullableJsonConverter : glo var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeSessionCreateResponseTypeExtensions.ToEnum(stringValue); + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModelExtensions.ToEnum(stringValue); } break; @@ -26,11 +26,11 @@ public sealed class RealtimeSessionCreateResponseTypeNullableJsonConverter : glo case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseType)numValue; + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseType?); + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel?); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,7 +42,7 @@ public sealed class RealtimeSessionCreateResponseTypeNullableJsonConverter : glo /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseType? value, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel? value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); @@ -53,7 +53,7 @@ public override void Write( } else { - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseTypeExtensions.ToValueString(value.Value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModelExtensions.ToValueString(value.Value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAOutputModalitie.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAOutputModalitie.g.cs new file mode 100644 index 00000000..0dd08b59 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAOutputModalitie.g.cs @@ -0,0 +1,53 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeSessionCreateResponseGAOutputModalitieJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitieExtensions.ToEnum(stringValue) ?? 
default; + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); + + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitieExtensions.ToValueString(value)); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAOutputModalitieNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAOutputModalitieNullable.g.cs new file mode 100644 index 00000000..4b584a2d --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAOutputModalitieNullable.g.cs @@ -0,0 +1,60 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeSessionCreateResponseGAOutputModalitieNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie? Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitieExtensions.ToEnum(stringValue); + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie?); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie? value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? 
throw new global::System.ArgumentNullException(nameof(writer)); + + if (value == null) + { + writer.WriteNullValue(); + } + else + { + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitieExtensions.ToValueString(value.Value)); + } + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATracingEnum.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATracingEnum.g.cs new file mode 100644 index 00000000..d1d164c0 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATracingEnum.g.cs @@ -0,0 +1,53 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeSessionCreateResponseGATracingEnumJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumExtensions.ToEnum(stringValue) ?? default; + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); + + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumExtensions.ToValueString(value)); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATracingEnumNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATracingEnumNullable.g.cs new file mode 100644 index 00000000..89a9a392 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATracingEnumNullable.g.cs @@ -0,0 +1,60 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeSessionCreateResponseGATracingEnumNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum? 
Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumExtensions.ToEnum(stringValue); + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum?); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum? value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); + + if (value == null) + { + writer.WriteNullValue(); + } + else + { + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumExtensions.ToValueString(value.Value)); + } + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseType.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAType.g.cs similarity index 78% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseType.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAType.g.cs index ec761d11..dcfad6a4 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseType.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAType.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeSessionCreateResponseTypeJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeSessionCreateResponseGATypeJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseType Read( + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeSessionCreateResponseTypeJsonConverter : global::Sys var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeSessionCreateResponseTypeExtensions.ToEnum(stringValue) ?? default; + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATypeExtensions.ToEnum(stringValue) ?? 
default; } break; @@ -26,11 +26,11 @@ public sealed class RealtimeSessionCreateResponseTypeJsonConverter : global::Sys case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseType)numValue; + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseType); + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,12 +42,12 @@ public sealed class RealtimeSessionCreateResponseTypeJsonConverter : global::Sys /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseType value, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseTypeExtensions.ToValueString(value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATypeExtensions.ToValueString(value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseModelNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATypeNullable.g.cs similarity index 79% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseModelNullable.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATypeNullable.g.cs index 960115dc..3c98ba13 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseModelNullable.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATypeNullable.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeSessionCreateResponseModelNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeSessionCreateResponseGATypeNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel? Read( + public override global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType? 
Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeSessionCreateResponseModelNullableJsonConverter : gl var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeSessionCreateResponseModelExtensions.ToEnum(stringValue); + return global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATypeExtensions.ToEnum(stringValue); } break; @@ -26,11 +26,11 @@ public sealed class RealtimeSessionCreateResponseModelNullableJsonConverter : gl case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel)numValue; + return (global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel?); + return default(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType?); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,7 +42,7 @@ public sealed class RealtimeSessionCreateResponseModelNullableJsonConverter : gl /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel? value, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType? value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); @@ -53,7 +53,7 @@ public override void Write( } else { - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseModelExtensions.ToValueString(value.Value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATypeExtensions.ToValueString(value.Value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAIncludeItem.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAIncludeItem.g.cs new file mode 100644 index 00000000..65c6ad4d --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAIncludeItem.g.cs @@ -0,0 +1,53 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeTranscriptionSessionCreateResponseGAIncludeItemJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItemExtensions.ToEnum(stringValue) ?? 
default; + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? throw new global::System.ArgumentNullException(nameof(writer)); + + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItemExtensions.ToValueString(value)); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAIncludeItemNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAIncludeItemNullable.g.cs new file mode 100644 index 00000000..38c341f8 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAIncludeItemNullable.g.cs @@ -0,0 +1,60 @@ +#nullable enable + +namespace tryAGI.OpenAI.JsonConverters +{ + /// + public sealed class RealtimeTranscriptionSessionCreateResponseGAIncludeItemNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + { + /// + public override global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem? Read( + ref global::System.Text.Json.Utf8JsonReader reader, + global::System.Type typeToConvert, + global::System.Text.Json.JsonSerializerOptions options) + { + switch (reader.TokenType) + { + case global::System.Text.Json.JsonTokenType.String: + { + var stringValue = reader.GetString(); + if (stringValue != null) + { + return global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItemExtensions.ToEnum(stringValue); + } + + break; + } + case global::System.Text.Json.JsonTokenType.Number: + { + var numValue = reader.GetInt32(); + return (global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem)numValue; + } + case global::System.Text.Json.JsonTokenType.Null: + { + return default(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem?); + } + default: + throw new global::System.ArgumentOutOfRangeException(nameof(reader)); + } + + return default; + } + + /// + public override void Write( + global::System.Text.Json.Utf8JsonWriter writer, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem? value, + global::System.Text.Json.JsonSerializerOptions options) + { + writer = writer ?? 
throw new global::System.ArgumentNullException(nameof(writer)); + + if (value == null) + { + writer.WriteNullValue(); + } + else + { + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItemExtensions.ToValueString(value.Value)); + } + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedType.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAType.g.cs similarity index 65% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedType.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAType.g.cs index b6b6e738..99c42460 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedType.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGAType.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeServerEventTranscriptionSessionCreatedTypeJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeTranscriptionSessionCreateResponseGATypeJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType Read( + public override global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeServerEventTranscriptionSessionCreatedTypeJsonConver var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedTypeExtensions.ToEnum(stringValue) ?? default; + return global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGATypeExtensions.ToEnum(stringValue) ?? default; } break; @@ -26,11 +26,11 @@ public sealed class RealtimeServerEventTranscriptionSessionCreatedTypeJsonConver case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType)numValue; + return (global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType); + return default(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,12 +42,12 @@ public sealed class RealtimeServerEventTranscriptionSessionCreatedTypeJsonConver /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType value, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? 
throw new global::System.ArgumentNullException(nameof(writer)); - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedTypeExtensions.ToValueString(value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGATypeExtensions.ToValueString(value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedTypeNullable.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGATypeNullable.g.cs similarity index 67% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedTypeNullable.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGATypeNullable.g.cs index b1207872..4fcc0d9e 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedTypeNullable.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGATypeNullable.g.cs @@ -3,10 +3,10 @@ namespace tryAGI.OpenAI.JsonConverters { /// - public sealed class RealtimeServerEventTranscriptionSessionCreatedTypeNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter + public sealed class RealtimeTranscriptionSessionCreateResponseGATypeNullableJsonConverter : global::System.Text.Json.Serialization.JsonConverter { /// - public override global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType? Read( + public override global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType? Read( ref global::System.Text.Json.Utf8JsonReader reader, global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) @@ -18,7 +18,7 @@ public sealed class RealtimeServerEventTranscriptionSessionCreatedTypeNullableJs var stringValue = reader.GetString(); if (stringValue != null) { - return global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedTypeExtensions.ToEnum(stringValue); + return global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGATypeExtensions.ToEnum(stringValue); } break; @@ -26,11 +26,11 @@ public sealed class RealtimeServerEventTranscriptionSessionCreatedTypeNullableJs case global::System.Text.Json.JsonTokenType.Number: { var numValue = reader.GetInt32(); - return (global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType)numValue; + return (global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType)numValue; } case global::System.Text.Json.JsonTokenType.Null: { - return default(global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType?); + return default(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType?); } default: throw new global::System.ArgumentOutOfRangeException(nameof(reader)); @@ -42,7 +42,7 @@ public sealed class RealtimeServerEventTranscriptionSessionCreatedTypeNullableJs /// public override void Write( global::System.Text.Json.Utf8JsonWriter writer, - global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType? value, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType? value, global::System.Text.Json.JsonSerializerOptions options) { writer = writer ?? 
throw new global::System.ArgumentNullException(nameof(writer)); @@ -53,7 +53,7 @@ public override void Write( } else { - writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedTypeExtensions.ToValueString(value.Value)); + writer.WriteStringValue(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGATypeExtensions.ToValueString(value.Value)); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.Session2.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.Session2.g.cs index a260b234..5fc05852 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.Session2.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonConverters.Session2.g.cs @@ -17,44 +17,44 @@ public class Session2JsonConverter : global::System.Text.Json.Serialization.Json var readerCopy = reader; - global::tryAGI.OpenAI.RealtimeSessionCreateResponse? realtimeSessionCreateResponse = default; + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA? realtimeSessionCreateResponseGA = default; try { - var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponse), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? - throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponse).Name}"); - realtimeSessionCreateResponse = global::System.Text.Json.JsonSerializer.Deserialize(ref readerCopy, typeInfo); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA).Name}"); + realtimeSessionCreateResponseGA = global::System.Text.Json.JsonSerializer.Deserialize(ref readerCopy, typeInfo); } catch (global::System.Text.Json.JsonException) { } readerCopy = reader; - global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse? realtimeTranscriptionSessionCreateResponse = default; + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA? realtimeTranscriptionSessionCreateResponseGA = default; try { - var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? - throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse).Name}"); - realtimeTranscriptionSessionCreateResponse = global::System.Text.Json.JsonSerializer.Deserialize(ref readerCopy, typeInfo); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? 
+ throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA).Name}"); + realtimeTranscriptionSessionCreateResponseGA = global::System.Text.Json.JsonSerializer.Deserialize(ref readerCopy, typeInfo); } catch (global::System.Text.Json.JsonException) { } var result = new global::tryAGI.OpenAI.Session2( - realtimeSessionCreateResponse, - realtimeTranscriptionSessionCreateResponse + realtimeSessionCreateResponseGA, + realtimeTranscriptionSessionCreateResponseGA ); - if (realtimeSessionCreateResponse != null) + if (realtimeSessionCreateResponseGA != null) { - var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponse), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? - throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponse).Name}"); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA).Name}"); _ = global::System.Text.Json.JsonSerializer.Deserialize(ref reader, typeInfo); } - else if (realtimeTranscriptionSessionCreateResponse != null) + else if (realtimeTranscriptionSessionCreateResponseGA != null) { - var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? - throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse).Name}"); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA).Name}"); _ = global::System.Text.Json.JsonSerializer.Deserialize(ref reader, typeInfo); } @@ -70,17 +70,17 @@ public override void Write( options = options ?? throw new global::System.ArgumentNullException(nameof(options)); var typeInfoResolver = options.TypeInfoResolver ?? throw new global::System.InvalidOperationException("TypeInfoResolver is not set."); - if (value.IsRealtimeSessionCreateResponse) + if (value.IsRealtimeSessionCreateResponseGA) { - var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponse), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? - throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponse).Name}"); - global::System.Text.Json.JsonSerializer.Serialize(writer, value.RealtimeSessionCreateResponse, typeInfo); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? 
+ throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA).Name}"); + global::System.Text.Json.JsonSerializer.Serialize(writer, value.RealtimeSessionCreateResponseGA, typeInfo); } - else if (value.IsRealtimeTranscriptionSessionCreateResponse) + else if (value.IsRealtimeTranscriptionSessionCreateResponseGA) { - var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? - throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse).Name}"); - global::System.Text.Json.JsonSerializer.Serialize(writer, value.RealtimeTranscriptionSessionCreateResponse, typeInfo); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA).Name}"); + global::System.Text.Json.JsonSerializer.Serialize(writer, value.RealtimeTranscriptionSessionCreateResponseGA, typeInfo); } } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonSerializerContextTypes.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonSerializerContextTypes.g.cs index 74186ee1..87d965ac 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonSerializerContextTypes.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.JsonSerializerContextTypes.g.cs @@ -7150,15 +7150,15 @@ public sealed partial class JsonSerializerContextTypes /// /// /// - public global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate? Type1781 { get; set; } + public global::tryAGI.OpenAI.RealtimeClientEventDiscriminator? Type1781 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdateType? Type1782 { get; set; } + public global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate? Type1782 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeClientEventDiscriminator? Type1783 { get; set; } + public global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdateType? Type1783 { get; set; } /// /// /// @@ -7358,2386 +7358,2478 @@ public sealed partial class JsonSerializerContextTypes /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponse? Type1833 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA? Type1833 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudio? Type1834 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio? Type1834 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInput? Type1835 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput? Type1835 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputNoiseReduction? Type1836 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction? Type1836 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetection? 
Type1837 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection? Type1837 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness? Type1838 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness? Type1838 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType? Type1839 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType? Type1839 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioOutput? Type1840 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput? Type1840 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret? Type1841 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret? Type1841 { get; set; } /// /// /// - public global::System.Collections.Generic.IList? Type1842 { get; set; } + public global::System.Collections.Generic.IList? Type1842 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseIncludeItem? Type1843 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAIncludeItem? Type1843 { get; set; } /// /// /// - public global::tryAGI.OpenAI.AnyOf? Type1844 { get; set; } + public global::tryAGI.OpenAI.AnyOf? Type1844 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseMaxOutputTokens? Type1845 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAMaxOutputTokens? Type1845 { get; set; } /// /// /// - public global::tryAGI.OpenAI.AnyOf? Type1846 { get; set; } + public global::tryAGI.OpenAI.AnyOf? Type1846 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseModel? Type1847 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAModel? Type1847 { get; set; } /// /// /// - public global::System.Collections.Generic.IList? Type1848 { get; set; } + public global::System.Collections.Generic.IList? Type1848 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseOutputModalitie? Type1849 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAOutputModalitie? Type1849 { get; set; } /// /// /// - public global::tryAGI.OpenAI.AnyOf? Type1850 { get; set; } + public global::tryAGI.OpenAI.AnyOf? Type1850 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseTracingEnum? Type1851 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum? Type1851 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseTracingEnum2? Type1852 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum2? Type1852 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseType? Type1853 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType? Type1853 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeCreateClientSecretResponseSessionDiscriminator? Type1854 { get; set; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA? Type1854 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPApprovalRequest? 
Type1855 { get; set; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio? Type1855 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPApprovalRequestType? Type1856 { get; set; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput? Type1856 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPApprovalResponse? Type1857 { get; set; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction? Type1857 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPApprovalResponseType? Type1858 { get; set; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection? Type1858 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPHTTPError? Type1859 { get; set; } + public global::System.Collections.Generic.IList? Type1859 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPHTTPErrorType? Type1860 { get; set; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAIncludeItem? Type1860 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPListTools? Type1861 { get; set; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType? Type1861 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPListToolsType? Type1862 { get; set; } + public global::tryAGI.OpenAI.RealtimeCreateClientSecretResponseSessionDiscriminator? Type1862 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPProtocolError? Type1863 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPApprovalRequest? Type1863 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPProtocolErrorType? Type1864 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPApprovalRequestType? Type1864 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPToolCall? Type1865 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPApprovalResponse? Type1865 { get; set; } /// /// /// - public global::tryAGI.OpenAI.Error2? Type1866 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPApprovalResponseType? Type1866 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPToolExecutionError? Type1867 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPHTTPError? Type1867 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPToolExecutionErrorType? Type1868 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPHTTPErrorType? Type1868 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPToolCallErrorDiscriminator? Type1869 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPListTools? Type1869 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeMCPToolCallType? Type1870 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPListToolsType? Type1870 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponse? Type1871 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPProtocolError? Type1871 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseObject? Type1872 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPProtocolErrorType? Type1872 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseStatus? Type1873 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPToolCall? Type1873 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseUsage? 
Type1874 { get; set; } + public global::tryAGI.OpenAI.Error2? Type1874 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEvent? Type1875 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPToolExecutionError? Type1875 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeError? Type1876 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPToolExecutionErrorType? Type1876 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeErrorType? Type1877 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPToolCallErrorDiscriminator? Type1877 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeErrorDetails? Type1878 { get; set; } + public global::tryAGI.OpenAI.RealtimeMCPToolCallType? Type1878 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreated? Type1879 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponse? Type1879 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionCreatedType? Type1880 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseObject? Type1880 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionUpdated? Type1881 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseStatus? Type1881 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeSessionUpdatedType? Type1882 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseUsage? Type1882 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationCreated? Type1883 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEvent? Type1883 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationCreatedType? Type1884 { get; set; } + public global::tryAGI.OpenAI.RealtimeError? Type1884 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversation? Type1885 { get; set; } + public global::tryAGI.OpenAI.RealtimeErrorType? Type1885 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationObject? Type1886 { get; set; } + public global::tryAGI.OpenAI.RealtimeErrorDetails? Type1886 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemCreated? Type1887 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreated? Type1887 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemCreatedType? Type1888 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionCreatedType? Type1888 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemInputAudioTranscriptionCompleted? Type1889 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionUpdated? Type1889 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemInputAudioTranscriptionCompletedType? Type1890 { get; set; } + public global::tryAGI.OpenAI.RealtimeSessionUpdatedType? Type1890 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemInputAudioTranscriptionFailed? Type1891 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationCreated? Type1891 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemInputAudioTranscriptionFailedType? Type1892 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationCreatedType? Type1892 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemTruncated? Type1893 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversation? 
Type1893 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemTruncatedType? Type1894 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationObject? Type1894 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemDeleted? Type1895 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemCreated? Type1895 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeConversationItemDeletedType? Type1896 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemCreatedType? Type1896 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeInputAudioBufferCommitted? Type1897 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemInputAudioTranscriptionCompleted? Type1897 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeInputAudioBufferCommittedType? Type1898 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemInputAudioTranscriptionCompletedType? Type1898 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeInputAudioBufferCleared? Type1899 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemInputAudioTranscriptionFailed? Type1899 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeInputAudioBufferClearedType? Type1900 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemInputAudioTranscriptionFailedType? Type1900 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeInputAudioBufferSpeechStarted? Type1901 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemTruncated? Type1901 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeInputAudioBufferSpeechStartedType? Type1902 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemTruncatedType? Type1902 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeInputAudioBufferSpeechStopped? Type1903 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemDeleted? Type1903 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeInputAudioBufferSpeechStoppedType? Type1904 { get; set; } + public global::tryAGI.OpenAI.RealtimeConversationItemDeletedType? Type1904 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseCreated? Type1905 { get; set; } + public global::tryAGI.OpenAI.RealtimeInputAudioBufferCommitted? Type1905 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseCreatedType? Type1906 { get; set; } + public global::tryAGI.OpenAI.RealtimeInputAudioBufferCommittedType? Type1906 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseDone? Type1907 { get; set; } + public global::tryAGI.OpenAI.RealtimeInputAudioBufferCleared? Type1907 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseDoneType? Type1908 { get; set; } + public global::tryAGI.OpenAI.RealtimeInputAudioBufferClearedType? Type1908 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseOutputItemAdded? Type1909 { get; set; } + public global::tryAGI.OpenAI.RealtimeInputAudioBufferSpeechStarted? Type1909 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseOutputItemAddedType? Type1910 { get; set; } + public global::tryAGI.OpenAI.RealtimeInputAudioBufferSpeechStartedType? Type1910 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseOutputItemDone? Type1911 { get; set; } + public global::tryAGI.OpenAI.RealtimeInputAudioBufferSpeechStopped? 
Type1911 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseOutputItemDoneType? Type1912 { get; set; } + public global::tryAGI.OpenAI.RealtimeInputAudioBufferSpeechStoppedType? Type1912 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseContentPartAdded? Type1913 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseCreated? Type1913 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseContentPartAddedType? Type1914 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseCreatedType? Type1914 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeContentPart? Type1915 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseDone? Type1915 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeContentPartType? Type1916 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseDoneType? Type1916 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseContentPartDone? Type1917 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseOutputItemAdded? Type1917 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseContentPartDoneType? Type1918 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseOutputItemAddedType? Type1918 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseTextDelta? Type1919 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseOutputItemDone? Type1919 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseTextDeltaType? Type1920 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseOutputItemDoneType? Type1920 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseTextDone? Type1921 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseContentPartAdded? Type1921 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseTextDoneType? Type1922 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseContentPartAddedType? Type1922 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseAudioTranscriptDelta? Type1923 { get; set; } + public global::tryAGI.OpenAI.RealtimeContentPart? Type1923 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseAudioTranscriptDeltaType? Type1924 { get; set; } + public global::tryAGI.OpenAI.RealtimeContentPartType? Type1924 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseAudioTranscriptDone? Type1925 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseContentPartDone? Type1925 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseAudioTranscriptDoneType? Type1926 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseContentPartDoneType? Type1926 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseAudioDelta? Type1927 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseTextDelta? Type1927 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseAudioDeltaType? Type1928 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseTextDeltaType? Type1928 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseAudioDone? Type1929 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseTextDone? Type1929 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseAudioDoneType? Type1930 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseTextDoneType? 
Type1930 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseFunctionCallArgumentsDelta? Type1931 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseAudioTranscriptDelta? Type1931 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseFunctionCallArgumentsDeltaType? Type1932 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseAudioTranscriptDeltaType? Type1932 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseFunctionCallArgumentsDone? Type1933 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseAudioTranscriptDone? Type1933 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeResponseFunctionCallArgumentsDoneType? Type1934 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseAudioTranscriptDoneType? Type1934 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeRateLimitsUpdated? Type1935 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseAudioDelta? Type1935 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeRateLimitsUpdatedType? Type1936 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseAudioDeltaType? Type1936 { get; set; } /// /// /// - public global::System.Collections.Generic.IList? Type1937 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseAudioDone? Type1937 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeRateLimitsUpdatedRateLimit? Type1938 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseAudioDoneType? Type1938 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeRateLimitsUpdatedRateLimitName? Type1939 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseFunctionCallArgumentsDelta? Type1939 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationCreated? Type1940 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseFunctionCallArgumentsDeltaType? Type1940 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationCreatedConversation? Type1941 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseFunctionCallArgumentsDone? Type1941 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationCreatedConversationObject? Type1942 { get; set; } + public global::tryAGI.OpenAI.RealtimeResponseFunctionCallArgumentsDoneType? Type1942 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationCreatedType? Type1943 { get; set; } + public global::tryAGI.OpenAI.RealtimeRateLimitsUpdated? Type1943 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemAdded? Type1944 { get; set; } + public global::tryAGI.OpenAI.RealtimeRateLimitsUpdatedType? Type1944 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemAddedType? Type1945 { get; set; } + public global::System.Collections.Generic.IList? Type1945 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemCreated? Type1946 { get; set; } + public global::tryAGI.OpenAI.RealtimeRateLimitsUpdatedRateLimit? Type1946 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemCreatedType? Type1947 { get; set; } + public global::tryAGI.OpenAI.RealtimeRateLimitsUpdatedRateLimitName? Type1947 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemDeleted? 
Type1948 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationCreated? Type1948 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemDeletedType? Type1949 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationCreatedConversation? Type1949 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemDone? Type1950 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationCreatedConversationObject? Type1950 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemDoneType? Type1951 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationCreatedType? Type1951 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionCompleted? Type1952 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemAdded? Type1952 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionCompletedType? Type1953 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemAddedType? Type1953 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionDelta? Type1954 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemCreated? Type1954 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionDeltaType? Type1955 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemCreatedType? Type1955 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionFailed? Type1956 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemDeleted? Type1956 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionFailedError? Type1957 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemDeletedType? Type1957 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionFailedType? Type1958 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemDone? Type1958 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionSegment? Type1959 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemDoneType? Type1959 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionSegmentType? Type1960 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionCompleted? Type1960 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemRetrieved? Type1961 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionCompletedType? Type1961 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemRetrievedType? Type1962 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionDelta? Type1962 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemTruncated? 
Type1963 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionDeltaType? Type1963 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventConversationItemTruncatedType? Type1964 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionFailed? Type1964 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventError? Type1965 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionFailedError? Type1965 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventErrorError? Type1966 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionFailedType? Type1966 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventErrorType? Type1967 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionSegment? Type1967 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferCleared? Type1968 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemInputAudioTranscriptionSegmentType? Type1968 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferClearedType? Type1969 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemRetrieved? Type1969 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferCommitted? Type1970 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemRetrievedType? Type1970 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferCommittedType? Type1971 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemTruncated? Type1971 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferSpeechStarted? Type1972 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventConversationItemTruncatedType? Type1972 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferSpeechStartedType? Type1973 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventError? Type1973 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferSpeechStopped? Type1974 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventErrorError? Type1974 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferSpeechStoppedType? Type1975 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventErrorType? Type1975 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferTimeoutTriggered? Type1976 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferCleared? Type1976 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferTimeoutTriggeredType? Type1977 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferClearedType? Type1977 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsCompleted? Type1978 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferCommitted? Type1978 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsCompletedType? 
Type1979 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferCommittedType? Type1979 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsFailed? Type1980 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferSpeechStarted? Type1980 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsFailedType? Type1981 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferSpeechStartedType? Type1981 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsInProgress? Type1982 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferSpeechStopped? Type1982 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsInProgressType? Type1983 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferSpeechStoppedType? Type1983 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferCleared? Type1984 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferTimeoutTriggered? Type1984 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferClearedType? Type1985 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventInputAudioBufferTimeoutTriggeredType? Type1985 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferStarted? Type1986 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsCompleted? Type1986 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferStartedType? Type1987 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsCompletedType? Type1987 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferStopped? Type1988 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsFailed? Type1988 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferStoppedType? Type1989 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsFailedType? Type1989 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventRateLimitsUpdated? Type1990 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsInProgress? Type1990 { get; set; } /// /// /// - public global::System.Collections.Generic.IList? Type1991 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventMCPListToolsInProgressType? Type1991 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventRateLimitsUpdatedRateLimit? Type1992 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferCleared? Type1992 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventRateLimitsUpdatedRateLimitName? Type1993 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferClearedType? Type1993 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventRateLimitsUpdatedType? Type1994 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferStarted? Type1994 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioDelta? Type1995 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferStartedType? 
Type1995 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioDeltaType? Type1996 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferStopped? Type1996 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioDone? Type1997 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventOutputAudioBufferStoppedType? Type1997 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioDoneType? Type1998 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventRateLimitsUpdated? Type1998 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioTranscriptDelta? Type1999 { get; set; } + public global::System.Collections.Generic.IList? Type1999 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioTranscriptDeltaType? Type2000 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventRateLimitsUpdatedRateLimit? Type2000 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioTranscriptDone? Type2001 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventRateLimitsUpdatedRateLimitName? Type2001 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioTranscriptDoneType? Type2002 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventRateLimitsUpdatedType? Type2002 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartAdded? Type2003 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioDelta? Type2003 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartAddedPart? Type2004 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioDeltaType? Type2004 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartAddedPartType? Type2005 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioDone? Type2005 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartAddedType? Type2006 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioDoneType? Type2006 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartDone? Type2007 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioTranscriptDelta? Type2007 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartDonePart? Type2008 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioTranscriptDeltaType? Type2008 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartDonePartType? Type2009 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioTranscriptDone? Type2009 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartDoneType? Type2010 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseAudioTranscriptDoneType? Type2010 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseCreated? Type2011 { get; set; } + public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartAdded? Type2011 { get; set; } /// /// /// - public global::tryAGI.OpenAI.RealtimeServerEventResponseCreatedType? 
Type2012 { get; set; }
+ public global::tryAGI.OpenAI.RealtimeServerEventResponseContentPartAddedPart? Type2012 { get; set; }

[Hunks renumbering the generated TypeNNNN properties of JsonSerializerContextTypes, Type2013 through Type2414; each property keeps its empty <summary> doc comment:
  - Type2013–Type2020 now hold RealtimeServerEventResponseContentPartAddedPartType, ...ContentPartAddedType, ...ContentPartDone, ...ContentPartDonePart, ...ContentPartDonePartType, ...ContentPartDoneType, RealtimeServerEventResponseCreated and RealtimeServerEventResponseCreatedType.
  - The former Type2013–Type2040 (RealtimeServerEventResponseDone through RealtimeServerEventSessionUpdatedType) shift up by 8 to Type2021–Type2048.
  - The RealtimeServerEventTranscriptionSessionCreated / RealtimeServerEventTranscriptionSessionCreatedType pair (former Type2041/Type2042) is removed; RealtimeServerEventTranscriptionSessionUpdated and its Type enum (former Type2043/Type2044) become Type2049/Type2050.
  - New Type2051–Type2067 cover RealtimeSessionCreateResponse and its member types (...Audio, ...AudioInput, ...AudioInputNoiseReduction, ...AudioInputTurnDetection, ...AudioOutput, ...IncludeItem, ...MaxOutputTokens, ...OutputModalitie, ...TracingEnum, ...TracingEnum2, ...TurnDetection), together with the associated IList and AnyOf wrapper properties.
  - From Response (former Type2045, now Type2068) onward, every property in these hunks keeps its type and shifts up by 23 slots; for example, ResponseStreamEventDiscriminator moves from Type2179 to Type2202, and GetEvalRunOutputItemsOrder from Type2368 to Type2391.]

- public global::System.Collections.Generic.IList?
Type2415 { get; set; } + public global::tryAGI.OpenAI.UsageCompletionsGroupByItem? Type2415 { get; set; } /// /// /// - public global::tryAGI.OpenAI.ListRunStepsIncludeItem? Type2416 { get; set; } + public global::tryAGI.OpenAI.UsageEmbeddingsBucketWidth? Type2416 { get; set; } /// /// /// - public global::System.Collections.Generic.IList? Type2417 { get; set; } + public global::System.Collections.Generic.IList? Type2417 { get; set; } /// /// /// - public global::tryAGI.OpenAI.GetRunStepIncludeItem? Type2418 { get; set; } + public global::tryAGI.OpenAI.UsageEmbeddingsGroupByItem? Type2418 { get; set; } /// /// /// - public global::tryAGI.OpenAI.ListVectorStoresOrder? Type2419 { get; set; } + public global::tryAGI.OpenAI.UsageImagesBucketWidth? Type2419 { get; set; } /// /// /// - public global::tryAGI.OpenAI.ListFilesInVectorStoreBatchOrder? Type2420 { get; set; } + public global::System.Collections.Generic.IList? Type2420 { get; set; } /// /// /// - public global::tryAGI.OpenAI.ListFilesInVectorStoreBatchFilter? Type2421 { get; set; } + public global::tryAGI.OpenAI.UsageImagesSource? Type2421 { get; set; } /// /// /// - public global::tryAGI.OpenAI.ListVectorStoreFilesOrder? Type2422 { get; set; } + public global::System.Collections.Generic.IList? Type2422 { get; set; } /// /// /// - public global::tryAGI.OpenAI.ListVectorStoreFilesFilter? Type2423 { get; set; } + public global::tryAGI.OpenAI.UsageImagesSize? Type2423 { get; set; } /// /// /// - public global::tryAGI.OpenAI.AnyOf? Type2424 { get; set; } + public global::System.Collections.Generic.IList? Type2424 { get; set; } /// /// /// - public global::tryAGI.OpenAI.AnyOf? Type2425 { get; set; } + public global::tryAGI.OpenAI.UsageImagesGroupByItem? Type2425 { get; set; } /// /// /// - public global::tryAGI.OpenAI.DeleteEvalResponse? Type2426 { get; set; } + public global::tryAGI.OpenAI.UsageModerationsBucketWidth? Type2426 { get; set; } /// /// /// - public global::tryAGI.OpenAI.DeleteEvalRunResponse? Type2427 { get; set; } + public global::System.Collections.Generic.IList? Type2427 { get; set; } /// /// /// - public global::tryAGI.OpenAI.AdminApiKeysDeleteResponse? Type2428 { get; set; } + public global::tryAGI.OpenAI.UsageModerationsGroupByItem? Type2428 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.UsageVectorStoresBucketWidth? Type2429 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type2430 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.UsageVectorStoresGroupByItem? Type2431 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListInputItemsOrder? Type2432 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListMessagesOrder? Type2433 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListRunsOrder? Type2434 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type2435 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.CreateRunIncludeItem? Type2436 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListRunStepsOrder? Type2437 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type2438 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListRunStepsIncludeItem? Type2439 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type2440 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.GetRunStepIncludeItem? 
Type2441 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListVectorStoresOrder? Type2442 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListFilesInVectorStoreBatchOrder? Type2443 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListFilesInVectorStoreBatchFilter? Type2444 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListVectorStoreFilesOrder? Type2445 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.ListVectorStoreFilesFilter? Type2446 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.AnyOf? Type2447 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.AnyOf? Type2448 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.DeleteEvalResponse? Type2449 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.DeleteEvalRunResponse? Type2450 { get; set; } + /// + /// + /// + public global::tryAGI.OpenAI.AdminApiKeysDeleteResponse? Type2451 { get; set; } } } \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeClientEvent.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeClientEvent.g.cs index 2977e61c..93cc96c6 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeClientEvent.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeClientEvent.g.cs @@ -420,8 +420,7 @@ public RealtimeClientEvent(global::tryAGI.OpenAI.RealtimeClientEventResponseCrea /// /// Send this event to update the session’s configuration.
/// The client may send this event at any time to update any field
- /// except for `voice` and `model`. `voice` can be updated only if there have been no other
- /// audio outputs yet.
+ /// except for `voice` and `model`. `voice` can be updated only if there have been no other audio outputs yet.
/// When the server receives a `session.update`, it will respond
/// with a `session.updated` event showing the full, effective configuration.
/// Only the fields that are present in the `session.update` are updated. To clear a field like
@@ -460,41 +459,6 @@ public RealtimeClientEvent(global::tryAGI.OpenAI.RealtimeClientEventSessionUpdat SessionUpdate = value; } - /// - /// Send this event to update a transcription session. - /// -#if NET6_0_OR_GREATER - public global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate? TranscriptionSessionUpdate { get; init; } -#else - public global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate? TranscriptionSessionUpdate { get; } -#endif - - /// - /// - /// -#if NET6_0_OR_GREATER - [global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(TranscriptionSessionUpdate))] -#endif - public bool IsTranscriptionSessionUpdate => TranscriptionSessionUpdate != null; - - /// - /// - /// - public static implicit operator RealtimeClientEvent(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate value) => new RealtimeClientEvent((global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate?)value); - - /// - /// - /// - public static implicit operator global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate?(RealtimeClientEvent @this) => @this.TranscriptionSessionUpdate; - - /// - /// - /// - public RealtimeClientEvent(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate? value) - { - TranscriptionSessionUpdate = value; - } - /// /// /// @@ -509,8 +473,7 @@ public RealtimeClientEvent( global::tryAGI.OpenAI.RealtimeClientEventInputAudioBufferCommit? inputAudioBufferCommit, global::tryAGI.OpenAI.RealtimeClientEventResponseCancel? responseCancel, global::tryAGI.OpenAI.RealtimeClientEventResponseCreate? responseCreate, - global::tryAGI.OpenAI.RealtimeClientEventSessionUpdate? sessionUpdate, - global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate? transcriptionSessionUpdate + global::tryAGI.OpenAI.RealtimeClientEventSessionUpdate? sessionUpdate ) { ConversationItemCreate = conversationItemCreate; @@ -524,14 +487,12 @@ public RealtimeClientEvent( ResponseCancel = responseCancel; ResponseCreate = responseCreate; SessionUpdate = sessionUpdate; - TranscriptionSessionUpdate = transcriptionSessionUpdate; } /// /// /// public object? Object => - TranscriptionSessionUpdate as object ?? SessionUpdate as object ?? ResponseCreate as object ?? ResponseCancel as object ?? @@ -559,8 +520,7 @@ ConversationItemCreate as object InputAudioBufferCommit?.ToString() ?? ResponseCancel?.ToString() ?? ResponseCreate?.ToString() ?? - SessionUpdate?.ToString() ?? - TranscriptionSessionUpdate?.ToString() + SessionUpdate?.ToString() ; /// @@ -568,7 +528,7 @@ ConversationItemCreate as object /// public bool Validate() { - return IsConversationItemCreate || IsConversationItemDelete || IsConversationItemRetrieve || IsConversationItemTruncate || IsInputAudioBufferAppend || IsInputAudioBufferClear || IsOutputAudioBufferClear || IsInputAudioBufferCommit || IsResponseCancel || IsResponseCreate || IsSessionUpdate || IsTranscriptionSessionUpdate; + return IsConversationItemCreate || IsConversationItemDelete || IsConversationItemRetrieve || IsConversationItemTruncate || IsInputAudioBufferAppend || IsInputAudioBufferClear || IsOutputAudioBufferClear || IsInputAudioBufferCommit || IsResponseCancel || IsResponseCreate || IsSessionUpdate; } /// @@ -586,7 +546,6 @@ public bool Validate() global::System.Func? responseCancel = null, global::System.Func? responseCreate = null, global::System.Func? sessionUpdate = null, - global::System.Func? 
transcriptionSessionUpdate = null, bool validate = true) { if (validate) @@ -638,10 +597,6 @@ public bool Validate() { return sessionUpdate(SessionUpdate!); } - else if (IsTranscriptionSessionUpdate && transcriptionSessionUpdate != null) - { - return transcriptionSessionUpdate(TranscriptionSessionUpdate!); - } return default(TResult); } @@ -661,7 +616,6 @@ public void Match( global::System.Action? responseCancel = null, global::System.Action? responseCreate = null, global::System.Action? sessionUpdate = null, - global::System.Action? transcriptionSessionUpdate = null, bool validate = true) { if (validate) @@ -713,10 +667,6 @@ public void Match( { sessionUpdate?.Invoke(SessionUpdate!); } - else if (IsTranscriptionSessionUpdate) - { - transcriptionSessionUpdate?.Invoke(TranscriptionSessionUpdate!); - } } /// @@ -748,8 +698,6 @@ public override int GetHashCode() typeof(global::tryAGI.OpenAI.RealtimeClientEventResponseCreate), SessionUpdate, typeof(global::tryAGI.OpenAI.RealtimeClientEventSessionUpdate), - TranscriptionSessionUpdate, - typeof(global::tryAGI.OpenAI.RealtimeClientEventTranscriptionSessionUpdate), }; const int offset = unchecked((int)2166136261); const int prime = 16777619; @@ -776,8 +724,7 @@ public bool Equals(RealtimeClientEvent other) global::System.Collections.Generic.EqualityComparer.Default.Equals(InputAudioBufferCommit, other.InputAudioBufferCommit) && global::System.Collections.Generic.EqualityComparer.Default.Equals(ResponseCancel, other.ResponseCancel) && global::System.Collections.Generic.EqualityComparer.Default.Equals(ResponseCreate, other.ResponseCreate) && - global::System.Collections.Generic.EqualityComparer.Default.Equals(SessionUpdate, other.SessionUpdate) && - global::System.Collections.Generic.EqualityComparer.Default.Equals(TranscriptionSessionUpdate, other.TranscriptionSessionUpdate) + global::System.Collections.Generic.EqualityComparer.Default.Equals(SessionUpdate, other.SessionUpdate) ; } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeClientEventSessionUpdate.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeClientEventSessionUpdate.g.cs index fe37adc4..1b856314 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeClientEventSessionUpdate.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeClientEventSessionUpdate.g.cs @@ -8,8 +8,7 @@ namespace tryAGI.OpenAI /// /// Send this event to update the session’s configuration.
/// The client may send this event at any time to update any field
- /// except for `voice` and `model`. `voice` can be updated only if there have been no other
- /// audio outputs yet.
+ /// except for `voice` and `model`. `voice` can be updated only if there have been no other audio outputs yet.
/// When the server receives a `session.update`, it will respond
/// with a `session.updated` event showing the full, effective configuration.
/// Only the fields that are present in the `session.update` are updated. To clear a field like
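// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor's note, not part of the generated diff).
// It shows one way the `session.update` event documented above can be sent
// through the RealtimeClientEvent union after this change: the concrete event
// is wrapped via the implicit conversion, and the removed
// transcription_session.update variant no longer appears in Validate()/Match().
// The populated RealtimeClientEventSessionUpdate instance and the ToJson()
// helper are assumed to come from the rest of the generated client and are not
// shown in this hunk.
public static class RealtimeClientEventUsageSketch
{
    public static void SendSessionUpdate(
        global::tryAGI.OpenAI.RealtimeClientEventSessionUpdate sessionUpdate,
        global::System.Action<string> sendJson)
    {
        // The implicit operator wraps the concrete event in the union type.
        global::tryAGI.OpenAI.RealtimeClientEvent clientEvent = sessionUpdate;

        // Validate() is true once one of the remaining union variants is set;
        // IsSessionUpdate guards access to the SessionUpdate property.
        if (clientEvent.Validate() && clientEvent.IsSessionUpdate)
        {
            sendJson(clientEvent.SessionUpdate!.ToJson());
        }
    }
}
// ---------------------------------------------------------------------------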
diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreated.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreated.g.cs deleted file mode 100644 index 400774b6..00000000 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreated.g.cs +++ /dev/null @@ -1,76 +0,0 @@ - -#nullable enable - -namespace tryAGI.OpenAI -{ - /// - /// Returned when a transcription session is created. - /// - public sealed partial class RealtimeServerEventTranscriptionSessionCreated - { - /// - /// The unique ID of the server event. - /// - [global::System.Text.Json.Serialization.JsonPropertyName("event_id")] - [global::System.Text.Json.Serialization.JsonRequired] - public required string EventId { get; set; } - - /// - /// A new Realtime transcription session configuration.
- /// When a session is created on the server via REST API, the session object
- /// also contains an ephemeral key. Default TTL for keys is 10 minutes. This
- /// property is not present when a session is updated via the WebSocket API. - ///
- [global::System.Text.Json.Serialization.JsonPropertyName("session")] - [global::System.Text.Json.Serialization.JsonRequired] - public required global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse Session { get; set; } - - /// - /// The event type, must be `transcription_session.created`. - /// - [global::System.Text.Json.Serialization.JsonPropertyName("type")] - [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeServerEventTranscriptionSessionCreatedTypeJsonConverter))] - public global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType Type { get; set; } - - /// - /// Additional properties that are not explicitly defined in the schema - /// - [global::System.Text.Json.Serialization.JsonExtensionData] - public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); - - /// - /// Initializes a new instance of the class. - /// - /// - /// The unique ID of the server event. - /// - /// - /// A new Realtime transcription session configuration.
- /// When a session is created on the server via REST API, the session object
- /// also contains an ephemeral key. Default TTL for keys is 10 minutes. This
- /// property is not present when a session is updated via the WebSocket API. - /// - /// - /// The event type, must be `transcription_session.created`. - /// -#if NET7_0_OR_GREATER - [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] -#endif - public RealtimeServerEventTranscriptionSessionCreated( - string eventId, - global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse session, - global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreatedType type) - { - this.EventId = eventId ?? throw new global::System.ArgumentNullException(nameof(eventId)); - this.Session = session ?? throw new global::System.ArgumentNullException(nameof(session)); - this.Type = type; - } - - /// - /// Initializes a new instance of the class. - /// - public RealtimeServerEventTranscriptionSessionCreated() - { - } - } -} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateRequestGAAudioInputTurnDetection.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateRequestGAAudioInputTurnDetection.g.cs index b50b60ca..fba93c43 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateRequestGAAudioInputTurnDetection.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateRequestGAAudioInputTurnDetection.g.cs @@ -27,7 +27,7 @@ public sealed partial class RealtimeSessionCreateRequestGAAudioInputTurnDetectio /// /// Optional idle timeout after which turn detection will auto-timeout when
- /// no additional audio is received. + /// no additional audio is received and emits a `timeout_triggered` event. ///
[global::System.Text.Json.Serialization.JsonPropertyName("idle_timeout_ms")] public int? IdleTimeoutMs { get; set; } @@ -90,7 +90,7 @@ public sealed partial class RealtimeSessionCreateRequestGAAudioInputTurnDetectio /// /// /// Optional idle timeout after which turn detection will auto-timeout when
- /// no additional audio is received. + /// no additional audio is received and emits a `timeout_triggered` event. /// /// /// Whether or not to automatically interrupt any ongoing response with output to the default
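// ---------------------------------------------------------------------------
// Illustrative sketch (editor's note, not part of the generated diff): setting
// the idle timeout documented above. Only the
// RealtimeSessionCreateRequestGAAudioInputTurnDetection type and its
// IdleTimeoutMs property are taken from this diff; the object-initializer
// style and the wiring of this object into a session create request are
// assumptions.
public static class TurnDetectionIdleTimeoutSketch
{
    public static global::tryAGI.OpenAI.RealtimeSessionCreateRequestGAAudioInputTurnDetection Create()
    {
        return new global::tryAGI.OpenAI.RealtimeSessionCreateRequestGAAudioInputTurnDetection
        {
            // If no further audio arrives for ~15 seconds, turn detection times
            // out and the server emits a `timeout_triggered` event.
            IdleTimeoutMs = 15_000,
        };
    }
}
// ---------------------------------------------------------------------------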
diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponse.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponse.g.cs index f0c372b3..112f12e7 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponse.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponse.g.cs @@ -6,33 +6,47 @@ namespace tryAGI.OpenAI { /// - /// A new Realtime session configuration, with an ephemeral key. Default TTL
- /// for keys is one minute. + /// A Realtime session configuration object. ///
public sealed partial class RealtimeSessionCreateResponse { /// - /// Configuration for input and output audio. + /// Configuration for input and output audio for the session. /// [global::System.Text.Json.Serialization.JsonPropertyName("audio")] public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudio? Audio { get; set; } /// - /// Ephemeral key returned by the API. + /// Expiration timestamp for the session, in seconds since epoch. /// - [global::System.Text.Json.Serialization.JsonPropertyName("client_secret")] - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret? ClientSecret { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("expires_at")] + public int? ExpiresAt { get; set; } + + /// + /// Unique identifier for the session that looks like `sess_1234567890abcdef`. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("id")] + public string? Id { get; set; } /// /// Additional fields to include in server outputs.
- /// `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription. + /// - `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription. ///
[global::System.Text.Json.Serialization.JsonPropertyName("include")] public global::System.Collections.Generic.IList? Include { get; set; } /// - /// The default system instructions (i.e. system message) prepended to model calls. This field allows the client to guide the model on desired responses. The model can be instructed on response content and format, (e.g. "be extremely succinct", "act friendly", "here are examples of good responses") and on audio behavior (e.g. "talk quickly", "inject emotion into your voice", "laugh frequently"). The instructions are not guaranteed to be followed by the model, but they provide guidance to the model on the desired behavior.
- /// Note that the server sets default instructions which will be used if this field is not set and are visible in the `session.created` event at the start of the session. + /// The default system instructions (i.e. system message) prepended to model
+ /// calls. This field allows the client to guide the model on desired
+ /// responses. The model can be instructed on response content and format,
+ /// (e.g. "be extremely succinct", "act friendly", "here are examples of good
+ /// responses") and on audio behavior (e.g. "talk quickly", "inject emotion
+ /// into your voice", "laugh frequently"). The instructions are not guaranteed
+ /// to be followed by the model, but they provide guidance to the model on the
+ /// desired behavior.
+ /// Note that the server sets default instructions which will be used if this
+ /// field is not set and are visible in the `session.created` event at the
+ /// start of the session. ///
[global::System.Text.Json.Serialization.JsonPropertyName("instructions")] public string? Instructions { get; set; } @@ -51,42 +65,36 @@ public sealed partial class RealtimeSessionCreateResponse /// The Realtime model used for this session. ///
[global::System.Text.Json.Serialization.JsonPropertyName("model")] - [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter))] - public global::tryAGI.OpenAI.AnyOf? Model { get; set; } + public string? Model { get; set; } /// - /// The set of modalities the model can respond with. It defaults to `["audio"]`, indicating
- /// that the model will respond with audio plus a transcript. `["text"]` can be used to make
- /// the model respond with text only. It is not possible to request both `text` and `audio` at the same time.
- /// Default Value: [audio] + /// The object type. Always `realtime.session`. ///
- [global::System.Text.Json.Serialization.JsonPropertyName("output_modalities")] - public global::System.Collections.Generic.IList? OutputModalities { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("object")] + public string? Object { get; set; } /// - /// Reference to a prompt template and its variables.
- /// [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + /// The set of modalities the model can respond with. To disable audio,
+ /// set this to ["text"]. ///
- [global::System.Text.Json.Serialization.JsonPropertyName("prompt")] - public global::tryAGI.OpenAI.Prompt2? Prompt { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("output_modalities")] + public global::System.Collections.Generic.IList? OutputModalities { get; set; } /// - /// How the model chooses tools. Provide one of the string modes or force a specific
- /// function/MCP tool.
- /// Default Value: auto + /// How the model chooses tools. Options are `auto`, `none`, `required`, or
+ /// specify a function. ///
[global::System.Text.Json.Serialization.JsonPropertyName("tool_choice")] - [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter))] - public global::tryAGI.OpenAI.AnyOf? ToolChoice { get; set; } + public string? ToolChoice { get; set; } /// - /// Tools available to the model. + /// Tools (functions) available to the model. /// [global::System.Text.Json.Serialization.JsonPropertyName("tools")] - public global::System.Collections.Generic.IList>? Tools { get; set; } + public global::System.Collections.Generic.IList? Tools { get; set; } /// - /// Realtime API can write session traces to the [Traces Dashboard](/logs?api=traces). Set to null to disable tracing. Once
+ /// Configuration options for tracing. Set to null to disable tracing. Once
/// tracing is enabled for a session, the configuration cannot be modified.
/// `auto` will create a trace for the session with default values for the
/// workflow name, group id, and metadata. @@ -96,19 +104,12 @@ public sealed partial class RealtimeSessionCreateResponse public global::tryAGI.OpenAI.AnyOf? Tracing { get; set; } /// - /// Controls how the realtime conversation is truncated prior to model inference.
- /// The default is `auto`. + /// Configuration for turn detection. Can be set to `null` to turn off. Server
+ /// VAD means that the model will detect the start and end of speech based on
+ /// audio volume and respond at the end of user speech. ///
- [global::System.Text.Json.Serialization.JsonPropertyName("truncation")] - [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeTruncationJsonConverter))] - public global::tryAGI.OpenAI.RealtimeTruncation? Truncation { get; set; } - - /// - /// The type of session to create. Always `realtime` for the Realtime API. - /// - [global::System.Text.Json.Serialization.JsonPropertyName("type")] - [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseTypeJsonConverter))] - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseType? Type { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("turn_detection")] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseTurnDetection? TurnDetection { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -120,18 +121,30 @@ public sealed partial class RealtimeSessionCreateResponse /// Initializes a new instance of the class. /// /// - /// Configuration for input and output audio. + /// Configuration for input and output audio for the session. + /// + /// + /// Expiration timestamp for the session, in seconds since epoch. /// - /// - /// Ephemeral key returned by the API. + /// + /// Unique identifier for the session that looks like `sess_1234567890abcdef`. /// /// /// Additional fields to include in server outputs.
- /// `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription. + /// - `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription. /// /// - /// The default system instructions (i.e. system message) prepended to model calls. This field allows the client to guide the model on desired responses. The model can be instructed on response content and format, (e.g. "be extremely succinct", "act friendly", "here are examples of good responses") and on audio behavior (e.g. "talk quickly", "inject emotion into your voice", "laugh frequently"). The instructions are not guaranteed to be followed by the model, but they provide guidance to the model on the desired behavior.
- /// Note that the server sets default instructions which will be used if this field is not set and are visible in the `session.created` event at the start of the session. + /// The default system instructions (i.e. system message) prepended to model
+ /// calls. This field allows the client to guide the model on desired
+ /// responses. The model can be instructed on response content and format,
+ /// (e.g. "be extremely succinct", "act friendly", "here are examples of good
+ /// responses") and on audio behavior (e.g. "talk quickly", "inject emotion
+ /// into your voice", "laugh frequently"). The instructions are not guaranteed
+ /// to be followed by the model, but they provide guidance to the model on the
+ /// desired behavior.
+ /// Note that the server sets default instructions which will be used if this
+ /// field is not set and are visible in the `session.created` event at the
+ /// start of the session. /// /// /// Maximum number of output tokens for a single assistant response,
@@ -142,68 +155,62 @@ public sealed partial class RealtimeSessionCreateResponse /// /// The Realtime model used for this session. /// - /// - /// The set of modalities the model can respond with. It defaults to `["audio"]`, indicating
- /// that the model will respond with audio plus a transcript. `["text"]` can be used to make
- /// the model respond with text only. It is not possible to request both `text` and `audio` at the same time.
- /// Default Value: [audio] + /// + /// The object type. Always `realtime.session`. /// - /// - /// Reference to a prompt template and its variables.
- /// [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + /// + /// The set of modalities the model can respond with. To disable audio,
+ /// set this to ["text"]. /// /// - /// How the model chooses tools. Provide one of the string modes or force a specific
- /// function/MCP tool.
- /// Default Value: auto + /// How the model chooses tools. Options are `auto`, `none`, `required`, or
+ /// specify a function. /// /// - /// Tools available to the model. + /// Tools (functions) available to the model. /// /// - /// Realtime API can write session traces to the [Traces Dashboard](/logs?api=traces). Set to null to disable tracing. Once
+ /// Configuration options for tracing. Set to null to disable tracing. Once
/// tracing is enabled for a session, the configuration cannot be modified.
/// `auto` will create a trace for the session with default values for the
/// workflow name, group id, and metadata. /// - /// - /// Controls how the realtime conversation is truncated prior to model inference.
- /// The default is `auto`. - /// - /// - /// The type of session to create. Always `realtime` for the Realtime API. + /// + /// Configuration for turn detection. Can be set to `null` to turn off. Server
+ /// VAD means that the model will detect the start and end of speech based on
+ /// audio volume and respond at the end of user speech. /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public RealtimeSessionCreateResponse( global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudio? audio, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret? clientSecret, + int? expiresAt, + string? id, global::System.Collections.Generic.IList? include, string? instructions, global::tryAGI.OpenAI.AnyOf? maxOutputTokens, - global::tryAGI.OpenAI.AnyOf? model, + string? model, + string? @object, global::System.Collections.Generic.IList? outputModalities, - global::tryAGI.OpenAI.Prompt2? prompt, - global::tryAGI.OpenAI.AnyOf? toolChoice, - global::System.Collections.Generic.IList>? tools, + string? toolChoice, + global::System.Collections.Generic.IList? tools, global::tryAGI.OpenAI.AnyOf? tracing, - global::tryAGI.OpenAI.RealtimeTruncation? truncation, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseType? type) + global::tryAGI.OpenAI.RealtimeSessionCreateResponseTurnDetection? turnDetection) { this.Audio = audio; - this.ClientSecret = clientSecret; + this.ExpiresAt = expiresAt; + this.Id = id; this.Include = include; this.Instructions = instructions; this.MaxOutputTokens = maxOutputTokens; this.Model = model; + this.Object = @object; this.OutputModalities = outputModalities; - this.Prompt = prompt; this.ToolChoice = toolChoice; this.Tools = tools; this.Tracing = tracing; - this.Truncation = truncation; - this.Type = type; + this.TurnDetection = turnDetection; } /// diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudio.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudio.g.cs index ee3158da..96f5b166 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudio.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudio.g.cs @@ -4,7 +4,7 @@ namespace tryAGI.OpenAI { /// - /// Configuration for input and output audio. + /// Configuration for input and output audio for the session. /// public sealed partial class RealtimeSessionCreateResponseAudio { diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInput.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInput.g.cs index dbdb098c..9fd8470f 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInput.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInput.g.cs @@ -16,9 +16,7 @@ public sealed partial class RealtimeSessionCreateResponseAudioInput public global::tryAGI.OpenAI.RealtimeAudioFormats? Format { get; set; } /// - /// Configuration for input audio noise reduction. This can be set to `null` to turn off.
- /// Noise reduction filters audio added to the input audio buffer before it is sent to VAD and the model.
- /// Filtering the audio can improve VAD and turn detection accuracy (reducing false positives) and model performance by improving perception of the input audio. + /// Configuration for input audio noise reduction. ///
[global::System.Text.Json.Serialization.JsonPropertyName("noise_reduction")] public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputNoiseReduction? NoiseReduction { get; set; } @@ -30,9 +28,7 @@ public sealed partial class RealtimeSessionCreateResponseAudioInput public global::tryAGI.OpenAI.AudioTranscription? Transcription { get; set; } /// - /// Configuration for turn detection, ether Server VAD or Semantic VAD. This can be set to `null` to turn off, in which case the client must manually trigger model response.
- /// Server VAD means that the model will detect the start and end of speech based on audio volume and respond at the end of user speech.
- /// Semantic VAD is more advanced and uses a turn detection model (in conjunction with VAD) to semantically estimate whether the user has finished speaking, then dynamically sets a timeout based on this probability. For example, if user audio trails off with "uhhm", the model will score a low probability of turn end and wait longer for the user to continue speaking. This can be useful for more natural conversations, but may have a higher latency. + /// Configuration for turn detection. ///
[global::System.Text.Json.Serialization.JsonPropertyName("turn_detection")] public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetection? TurnDetection { get; set; } @@ -48,15 +44,11 @@ public sealed partial class RealtimeSessionCreateResponseAudioInput ///
/// /// - /// Configuration for input audio noise reduction. This can be set to `null` to turn off.
- /// Noise reduction filters audio added to the input audio buffer before it is sent to VAD and the model.
- /// Filtering the audio can improve VAD and turn detection accuracy (reducing false positives) and model performance by improving perception of the input audio. + /// Configuration for input audio noise reduction. /// /// /// - /// Configuration for turn detection, ether Server VAD or Semantic VAD. This can be set to `null` to turn off, in which case the client must manually trigger model response.
- /// Server VAD means that the model will detect the start and end of speech based on audio volume and respond at the end of user speech.
- /// Semantic VAD is more advanced and uses a turn detection model (in conjunction with VAD) to semantically estimate whether the user has finished speaking, then dynamically sets a timeout based on this probability. For example, if user audio trails off with "uhhm", the model will score a low probability of turn end and wait longer for the user to continue speaking. This can be useful for more natural conversations, but may have a higher latency. + /// Configuration for turn detection. /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputNoiseReduction.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputNoiseReduction.g.cs index 0412f1e9..3d0dcb74 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputNoiseReduction.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputNoiseReduction.g.cs @@ -4,9 +4,7 @@ namespace tryAGI.OpenAI { /// - /// Configuration for input audio noise reduction. This can be set to `null` to turn off.
- /// Noise reduction filters audio added to the input audio buffer before it is sent to VAD and the model.
- /// Filtering the audio can improve VAD and turn detection accuracy (reducing false positives) and model performance by improving perception of the input audio. + /// Configuration for input audio noise reduction. ///
public sealed partial class RealtimeSessionCreateResponseAudioInputNoiseReduction { diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetection.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetection.g.cs index af4ed498..1b0aa146 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetection.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetection.g.cs @@ -4,72 +4,33 @@ namespace tryAGI.OpenAI { /// - /// Configuration for turn detection, ether Server VAD or Semantic VAD. This can be set to `null` to turn off, in which case the client must manually trigger model response.
- /// Server VAD means that the model will detect the start and end of speech based on audio volume and respond at the end of user speech.
- /// Semantic VAD is more advanced and uses a turn detection model (in conjunction with VAD) to semantically estimate whether the user has finished speaking, then dynamically sets a timeout based on this probability. For example, if user audio trails off with "uhhm", the model will score a low probability of turn end and wait longer for the user to continue speaking. This can be useful for more natural conversations, but may have a higher latency. + /// Configuration for turn detection. ///
public sealed partial class RealtimeSessionCreateResponseAudioInputTurnDetection { /// - /// Whether or not to automatically generate a response when a VAD stop event occurs.
- /// Default Value: true - ///
- [global::System.Text.Json.Serialization.JsonPropertyName("create_response")] - public bool? CreateResponse { get; set; } - - /// - /// Used only for `semantic_vad` mode. The eagerness of the model to respond. `low` will wait longer for the user to continue speaking, `high` will respond more quickly. `auto` is the default and is equivalent to `medium`. `low`, `medium`, and `high` have max timeouts of 8s, 4s, and 2s respectively.
- /// Default Value: auto - ///
- [global::System.Text.Json.Serialization.JsonPropertyName("eagerness")] - [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessJsonConverter))] - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness? Eagerness { get; set; } - - /// - /// Optional idle timeout after which turn detection will auto-timeout when
- /// no additional audio is received. - ///
- [global::System.Text.Json.Serialization.JsonPropertyName("idle_timeout_ms")] - public int? IdleTimeoutMs { get; set; } - - /// - /// Whether or not to automatically interrupt any ongoing response with output to the default
- /// conversation (i.e. `conversation` of `auto`) when a VAD start event occurs.
- /// Default Value: true - ///
- [global::System.Text.Json.Serialization.JsonPropertyName("interrupt_response")] - public bool? InterruptResponse { get; set; } - - /// - /// Used only for `server_vad` mode. Amount of audio to include before the VAD detected speech (in
- /// milliseconds). Defaults to 300ms. + /// ///
[global::System.Text.Json.Serialization.JsonPropertyName("prefix_padding_ms")] public int? PrefixPaddingMs { get; set; } /// - /// Used only for `server_vad` mode. Duration of silence to detect speech stop (in milliseconds). Defaults
- /// to 500ms. With shorter values the model will respond more quickly,
- /// but may jump in on short pauses from the user. + /// ///
[global::System.Text.Json.Serialization.JsonPropertyName("silence_duration_ms")] public int? SilenceDurationMs { get; set; } /// - /// Used only for `server_vad` mode. Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A
- /// higher threshold will require louder audio to activate the model, and
- /// thus might perform better in noisy environments. + /// ///
[global::System.Text.Json.Serialization.JsonPropertyName("threshold")] public double? Threshold { get; set; } /// - /// Type of turn detection.
- /// Default Value: server_vad + /// Type of turn detection, only `server_vad` is currently supported. ///
[global::System.Text.Json.Serialization.JsonPropertyName("type")] - [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseAudioInputTurnDetectionTypeJsonConverter))] - public global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType? Type { get; set; } + public string? Type { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -80,58 +41,21 @@ public sealed partial class RealtimeSessionCreateResponseAudioInputTurnDetection /// /// Initializes a new instance of the class. /// - /// - /// Whether or not to automatically generate a response when a VAD stop event occurs.
- /// Default Value: true - /// - /// - /// Used only for `semantic_vad` mode. The eagerness of the model to respond. `low` will wait longer for the user to continue speaking, `high` will respond more quickly. `auto` is the default and is equivalent to `medium`. `low`, `medium`, and `high` have max timeouts of 8s, 4s, and 2s respectively.
- /// Default Value: auto - /// - /// - /// Optional idle timeout after which turn detection will auto-timeout when
- /// no additional audio is received. - /// - /// - /// Whether or not to automatically interrupt any ongoing response with output to the default
- /// conversation (i.e. `conversation` of `auto`) when a VAD start event occurs.
- /// Default Value: true - /// - /// - /// Used only for `server_vad` mode. Amount of audio to include before the VAD detected speech (in
- /// milliseconds). Defaults to 300ms. - /// - /// - /// Used only for `server_vad` mode. Duration of silence to detect speech stop (in milliseconds). Defaults
- /// to 500ms. With shorter values the model will respond more quickly,
- /// but may jump in on short pauses from the user. - /// - /// - /// Used only for `server_vad` mode. Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A
- /// higher threshold will require louder audio to activate the model, and
- /// thus might perform better in noisy environments. - /// + /// + /// + /// /// - /// Type of turn detection.
- /// Default Value: server_vad + /// Type of turn detection, only `server_vad` is currently supported. /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public RealtimeSessionCreateResponseAudioInputTurnDetection( - bool? createResponse, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness? eagerness, - int? idleTimeoutMs, - bool? interruptResponse, int? prefixPaddingMs, int? silenceDurationMs, double? threshold, - global::tryAGI.OpenAI.RealtimeSessionCreateResponseAudioInputTurnDetectionType? type) + string? type) { - this.CreateResponse = createResponse; - this.Eagerness = eagerness; - this.IdleTimeoutMs = idleTimeoutMs; - this.InterruptResponse = interruptResponse; this.PrefixPaddingMs = prefixPaddingMs; this.SilenceDurationMs = silenceDurationMs; this.Threshold = threshold; diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioOutput.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioOutput.g.cs index 3888db28..a81bcdcf 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioOutput.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioOutput.g.cs @@ -16,11 +16,7 @@ public sealed partial class RealtimeSessionCreateResponseAudioOutput public global::tryAGI.OpenAI.RealtimeAudioFormats? Format { get; set; } /// - /// The speed of the model's spoken response as a multiple of the original speed.
- /// 1.0 is the default speed. 0.25 is the minimum speed. 1.5 is the maximum speed. This value can only be changed in between model turns, not while a response is in progress.
- /// This parameter is a post-processing adjustment to the audio after it is generated, it's
- /// also possible to prompt the model to speak faster or slower.
- /// Default Value: 1 + /// ///
[global::System.Text.Json.Serialization.JsonPropertyName("speed")] public double? Speed { get; set; } @@ -43,13 +39,7 @@ public sealed partial class RealtimeSessionCreateResponseAudioOutput /// Initializes a new instance of the class. ///
/// - /// - /// The speed of the model's spoken response as a multiple of the original speed.
- /// 1.0 is the default speed. 0.25 is the minimum speed. 1.5 is the maximum speed. This value can only be changed in between model turns, not while a response is in progress.
- /// This parameter is a post-processing adjustment to the audio after it is generated, it's
- /// also possible to prompt the model to speak faster or slower.
- /// Default Value: 1 - /// + /// /// /// Example: ash /// diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreated.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGA.Json.g.cs similarity index 82% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreated.Json.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGA.Json.g.cs index 9406e821..302bb630 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreated.Json.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGA.Json.g.cs @@ -2,7 +2,7 @@ namespace tryAGI.OpenAI { - public sealed partial class RealtimeServerEventTranscriptionSessionCreated + public sealed partial class RealtimeSessionCreateResponseGA { /// /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. @@ -34,14 +34,14 @@ public string ToJson( /// /// Deserializes a JSON string using the provided JsonSerializerContext. /// - public static global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreated? FromJson( + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA? FromJson( string json, global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) { return global::System.Text.Json.JsonSerializer.Deserialize( json, - typeof(global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreated), - jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreated; + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA; } /// @@ -51,11 +51,11 @@ public string ToJson( [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] #endif - public static global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreated? FromJson( + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA? FromJson( string json, global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) { - return global::System.Text.Json.JsonSerializer.Deserialize( + return global::System.Text.Json.JsonSerializer.Deserialize( json, jsonSerializerOptions); } @@ -63,14 +63,14 @@ public string ToJson( /// /// Deserializes a JSON stream using the provided JsonSerializerContext. 
/// - public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( global::System.IO.Stream jsonStream, global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) { return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( jsonStream, - typeof(global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreated), - jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeServerEventTranscriptionSessionCreated; + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA; } /// @@ -80,11 +80,11 @@ public string ToJson( [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] #endif - public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( global::System.IO.Stream jsonStream, global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) { - return global::System.Text.Json.JsonSerializer.DeserializeAsync( + return global::System.Text.Json.JsonSerializer.DeserializeAsync( jsonStream, jsonSerializerOptions); } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGA.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGA.g.cs new file mode 100644 index 00000000..d2716417 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGA.g.cs @@ -0,0 +1,217 @@ + +#pragma warning disable CS0618 // Type or member is obsolete + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// A new Realtime session configuration, with an ephemeral key. Default TTL
+ /// for keys is one minute. + ///
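As a quick orientation for this new model, the following sketch round-trips an illustrative session payload through the generated JSON helpers. The literal is hand-written and trimmed, the nested client_secret shape is an assumption, and only the FromJson/ToJson overloads defined in the accompanying Json.g.cs file are relied on.

    using System;
    using tryAGI.OpenAI;

    internal static class RealtimeSessionGaRoundTrip
    {
        public static void Run()
        {
            // Illustrative payload only; real responses carry many more fields,
            // and the client_secret sub-object shape is assumed here.
            const string json = """
                {
                  "type": "realtime",
                  "instructions": "Be terse.",
                  "output_modalities": ["audio"],
                  "client_secret": { "value": "ek_example", "expires_at": 0 }
                }
                """;

            var session = RealtimeSessionCreateResponseGA.FromJson(json)
                ?? throw new InvalidOperationException("Deserialization returned null.");

            Console.WriteLine(session.Instructions); // "Be terse."
            Console.WriteLine(session.ToJson());     // re-serialized with the same generated converters
        }
    }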
+ public sealed partial class RealtimeSessionCreateResponseGA + { + /// + /// Configuration for input and output audio. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("audio")] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio? Audio { get; set; } + + /// + /// Ephemeral key returned by the API. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("client_secret")] + [global::System.Text.Json.Serialization.JsonRequired] + public required global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret ClientSecret { get; set; } + + /// + /// Additional fields to include in server outputs.
+ /// `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("include")] + public global::System.Collections.Generic.IList? Include { get; set; } + + /// + /// The default system instructions (i.e. system message) prepended to model calls. This field allows the client to guide the model on desired responses. The model can be instructed on response content and format, (e.g. "be extremely succinct", "act friendly", "here are examples of good responses") and on audio behavior (e.g. "talk quickly", "inject emotion into your voice", "laugh frequently"). The instructions are not guaranteed to be followed by the model, but they provide guidance to the model on the desired behavior.
+ /// Note that the server sets default instructions which will be used if this field is not set and are visible in the `session.created` event at the start of the session. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("instructions")] + public string? Instructions { get; set; } + + /// + /// Maximum number of output tokens for a single assistant response,
+ /// inclusive of tool calls. Provide an integer between 1 and 4096 to
+ /// limit output tokens, or `inf` for the maximum available tokens for a
+ /// given model. Defaults to `inf`. + ///
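Since the generated property is an AnyOf whose type arguments are elided in this diff, a consumer that composes the session payload by hand can still honour the documented contract with a tiny helper; the helper below is an illustrative assumption, not part of the library.

    using System;

    internal static class OutputTokenCap
    {
        // Value for "max_output_tokens": an integer clamped to the documented
        // 1..4096 range, or the "inf" sentinel when no explicit cap is wanted.
        public static object ForWire(int? requestedCap) =>
            requestedCap is int cap ? Math.Clamp(cap, 1, 4096) : "inf";
    }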
+ [global::System.Text.Json.Serialization.JsonPropertyName("max_output_tokens")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter))] + public global::tryAGI.OpenAI.AnyOf? MaxOutputTokens { get; set; } + + /// + /// The Realtime model used for this session. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("model")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter))] + public global::tryAGI.OpenAI.AnyOf? Model { get; set; } + + /// + /// The set of modalities the model can respond with. It defaults to `["audio"]`, indicating
+ /// that the model will respond with audio plus a transcript. `["text"]` can be used to make
+ /// the model respond with text only. It is not possible to request both `text` and `audio` at the same time.
+ /// Default Value: [audio] + ///
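A small sketch of branching on the modalities of a deserialized session. The element type of OutputModalities is elided in this diff; the code assumes the generated enum is RealtimeSessionCreateResponseGAOutputModalitie with Audio and Text members, which matches the library's naming pattern but is not shown here.

    using System.Linq;
    using tryAGI.OpenAI;

    internal static class ModalityCheck
    {
        // A missing list means the default ["audio"], i.e. audio plus transcript.
        public static bool IsTextOnly(RealtimeSessionCreateResponseGA session) =>
            session.OutputModalities is { Count: > 0 } modalities
            && modalities.All(m => m == RealtimeSessionCreateResponseGAOutputModalitie.Text);
    }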
+ [global::System.Text.Json.Serialization.JsonPropertyName("output_modalities")] + public global::System.Collections.Generic.IList? OutputModalities { get; set; } + + /// + /// Reference to a prompt template and its variables.
+ /// [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("prompt")] + public global::tryAGI.OpenAI.Prompt2? Prompt { get; set; } + + /// + /// How the model chooses tools. Provide one of the string modes or force a specific
+ /// function/MCP tool.
+ /// Default Value: auto + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("tool_choice")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter))] + public global::tryAGI.OpenAI.AnyOf? ToolChoice { get; set; } + + /// + /// Tools available to the model. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("tools")] + public global::System.Collections.Generic.IList>? Tools { get; set; } + + /// + /// Realtime API can write session traces to the [Traces Dashboard](/logs?api=traces). Set to null to disable tracing. Once
+ /// tracing is enabled for a session, the configuration cannot be modified.
+ /// `auto` will create a trace for the session with default values for the
+ /// workflow name, group id, and metadata. + ///
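Because the tracing property is generated as an AnyOf (type arguments elided above), the three accepted wire shapes are easiest to see as raw JSON; the object field names below follow the upstream tracing configuration and are an assumption, as they are not defined anywhere in this diff.

    using System.Text.Json.Nodes;

    // "tracing": "auto"  -> trace with default workflow name, group id and metadata
    // "tracing": null    -> disable tracing for the session
    // "tracing": {...}   -> pin the trace metadata explicitly, e.g.:
    JsonNode pinnedTracing = new JsonObject
    {
        ["workflow_name"] = "support-bot",
        ["group_id"] = "tenant-42",
        ["metadata"] = new JsonObject { ["env"] = "staging" },
    };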
+ [global::System.Text.Json.Serialization.JsonPropertyName("tracing")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.AnyOfJsonConverter))] + public global::tryAGI.OpenAI.AnyOf? Tracing { get; set; } + + /// + /// Controls how the realtime conversation is truncated prior to model inference.
+ /// The default is `auto`. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("truncation")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeTruncationJsonConverter))] + public global::tryAGI.OpenAI.RealtimeTruncation? Truncation { get; set; } + + /// + /// The type of session to create. Always `realtime` for the Realtime API. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("type")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGATypeJsonConverter))] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType Type { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// Configuration for input and output audio. + /// + /// + /// Ephemeral key returned by the API. + /// + /// + /// Additional fields to include in server outputs.
+ /// `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription. + /// + /// + /// The default system instructions (i.e. system message) prepended to model calls. This field allows the client to guide the model on desired responses. The model can be instructed on response content and format, (e.g. "be extremely succinct", "act friendly", "here are examples of good responses") and on audio behavior (e.g. "talk quickly", "inject emotion into your voice", "laugh frequently"). The instructions are not guaranteed to be followed by the model, but they provide guidance to the model on the desired behavior.
+ /// Note that the server sets default instructions which will be used if this field is not set and are visible in the `session.created` event at the start of the session. + /// + /// + /// Maximum number of output tokens for a single assistant response,
+ /// inclusive of tool calls. Provide an integer between 1 and 4096 to
+ /// limit output tokens, or `inf` for the maximum available tokens for a
+ /// given model. Defaults to `inf`. + /// + /// + /// The Realtime model used for this session. + /// + /// + /// The set of modalities the model can respond with. It defaults to `["audio"]`, indicating
+ /// that the model will respond with audio plus a transcript. `["text"]` can be used to make
+ /// the model respond with text only. It is not possible to request both `text` and `audio` at the same time.
+ /// Default Value: [audio] + /// + /// + /// Reference to a prompt template and its variables.
+ /// [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + /// + /// + /// How the model chooses tools. Provide one of the string modes or force a specific
+ /// function/MCP tool.
+ /// Default Value: auto + /// + /// + /// Tools available to the model. + /// + /// + /// Realtime API can write session traces to the [Traces Dashboard](/logs?api=traces). Set to null to disable tracing. Once
+ /// tracing is enabled for a session, the configuration cannot be modified.
+ /// `auto` will create a trace for the session with default values for the
+ /// workflow name, group id, and metadata. + /// + /// + /// Controls how the realtime conversation is truncated prior to model inference.
+ /// The default is `auto`. + /// + /// + /// The type of session to create. Always `realtime` for the Realtime API. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeSessionCreateResponseGA( + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret clientSecret, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio? audio, + global::System.Collections.Generic.IList? include, + string? instructions, + global::tryAGI.OpenAI.AnyOf? maxOutputTokens, + global::tryAGI.OpenAI.AnyOf? model, + global::System.Collections.Generic.IList? outputModalities, + global::tryAGI.OpenAI.Prompt2? prompt, + global::tryAGI.OpenAI.AnyOf? toolChoice, + global::System.Collections.Generic.IList>? tools, + global::tryAGI.OpenAI.AnyOf? tracing, + global::tryAGI.OpenAI.RealtimeTruncation? truncation, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAType type) + { + this.ClientSecret = clientSecret ?? throw new global::System.ArgumentNullException(nameof(clientSecret)); + this.Audio = audio; + this.Include = include; + this.Instructions = instructions; + this.MaxOutputTokens = maxOutputTokens; + this.Model = model; + this.OutputModalities = outputModalities; + this.Prompt = prompt; + this.ToolChoice = toolChoice; + this.Tools = tools; + this.Tracing = tracing; + this.Truncation = truncation; + this.Type = type; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeSessionCreateResponseGA() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudio.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudio.Json.g.cs new file mode 100644 index 00000000..f5dba3bb --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudio.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseGAAudio + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio? 
FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudio; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudio.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudio.g.cs new file mode 100644 index 00000000..7685d269 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudio.g.cs @@ -0,0 +1,52 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Configuration for input and output audio. 
+ /// + public sealed partial class RealtimeSessionCreateResponseGAAudio + { + /// + /// + /// + [global::System.Text.Json.Serialization.JsonPropertyName("input")] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput? Input { get; set; } + + /// + /// + /// + [global::System.Text.Json.Serialization.JsonPropertyName("output")] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput? Output { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeSessionCreateResponseGAAudio( + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput? input, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput? output) + { + this.Input = input; + this.Output = output; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeSessionCreateResponseGAAudio() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseClientSecret.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInput.Json.g.cs similarity index 88% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseClientSecret.Json.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInput.Json.g.cs index 37ada745..8277d29a 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseClientSecret.Json.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInput.Json.g.cs @@ -2,7 +2,7 @@ namespace tryAGI.OpenAI { - public sealed partial class RealtimeSessionCreateResponseClientSecret + public sealed partial class RealtimeSessionCreateResponseGAAudioInput { /// /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. @@ -34,14 +34,14 @@ public string ToJson( /// /// Deserializes a JSON string using the provided JsonSerializerContext. /// - public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret? FromJson( + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput? FromJson( string json, global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) { return global::System.Text.Json.JsonSerializer.Deserialize( json, - typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret), - jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret; + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput; } /// @@ -51,11 +51,11 @@ public string ToJson( [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. 
Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] #endif - public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret? FromJson( + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput? FromJson( string json, global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) { - return global::System.Text.Json.JsonSerializer.Deserialize( + return global::System.Text.Json.JsonSerializer.Deserialize( json, jsonSerializerOptions); } @@ -63,14 +63,14 @@ public string ToJson( /// /// Deserializes a JSON stream using the provided JsonSerializerContext. /// - public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( global::System.IO.Stream jsonStream, global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) { return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( jsonStream, - typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret), - jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseClientSecret; + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInput; } /// @@ -80,11 +80,11 @@ public string ToJson( [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] #endif - public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( global::System.IO.Stream jsonStream, global::System.Text.Json.JsonSerializerOptions? 
jsonSerializerOptions = null) { - return global::System.Text.Json.JsonSerializer.DeserializeAsync( + return global::System.Text.Json.JsonSerializer.DeserializeAsync( jsonStream, jsonSerializerOptions); } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInput.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInput.g.cs new file mode 100644 index 00000000..ebf7c5fc --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInput.g.cs @@ -0,0 +1,83 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// + /// + public sealed partial class RealtimeSessionCreateResponseGAAudioInput + { + /// + /// + /// + [global::System.Text.Json.Serialization.JsonPropertyName("format")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeAudioFormatsJsonConverter))] + public global::tryAGI.OpenAI.RealtimeAudioFormats? Format { get; set; } + + /// + /// Configuration for input audio noise reduction. This can be set to `null` to turn off.
+ /// Noise reduction filters audio added to the input audio buffer before it is sent to VAD and the model.
+ /// Filtering the audio can improve VAD and turn detection accuracy (reducing false positives) and model performance by improving perception of the input audio. + ///
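For orientation, a minimal sketch of composing the input side of the audio configuration with noise reduction enabled. The property names come from this file; the NoiseReductionType member name is inferred from the `near_field` wire value and is an assumption.

    using tryAGI.OpenAI;

    var input = new RealtimeSessionCreateResponseGAAudioInput
    {
        // Close-talking (headset) microphone -> near-field noise reduction.
        NoiseReduction = new RealtimeSessionCreateResponseGAAudioInputNoiseReduction
        {
            Type = NoiseReductionType.NearField, // assumed member name for "near_field"
        },
        // Format, Transcription and TurnDetection are left unset to keep server defaults.
    };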
+ [global::System.Text.Json.Serialization.JsonPropertyName("noise_reduction")] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction? NoiseReduction { get; set; } + + /// + /// + /// + [global::System.Text.Json.Serialization.JsonPropertyName("transcription")] + public global::tryAGI.OpenAI.AudioTranscription? Transcription { get; set; } + + /// + /// Configuration for turn detection, ether Server VAD or Semantic VAD. This can be set to `null` to turn off, in which case the client must manually trigger model response.
+ /// Server VAD means that the model will detect the start and end of speech based on audio volume and respond at the end of user speech.
+ /// Semantic VAD is more advanced and uses a turn detection model (in conjunction with VAD) to semantically estimate whether the user has finished speaking, then dynamically sets a timeout based on this probability. For example, if user audio trails off with "uhhm", the model will score a low probability of turn end and wait longer for the user to continue speaking. This can be useful for more natural conversations, but may have a higher latency. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("turn_detection")] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection? TurnDetection { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// + /// Configuration for input audio noise reduction. This can be set to `null` to turn off.
+ /// Noise reduction filters audio added to the input audio buffer before it is sent to VAD and the model.
+ /// Filtering the audio can improve VAD and turn detection accuracy (reducing false positives) and model performance by improving perception of the input audio. + /// + /// + /// + /// Configuration for turn detection, ether Server VAD or Semantic VAD. This can be set to `null` to turn off, in which case the client must manually trigger model response.
+ /// Server VAD means that the model will detect the start and end of speech based on audio volume and respond at the end of user speech.
+ /// Semantic VAD is more advanced and uses a turn detection model (in conjunction with VAD) to semantically estimate whether the user has finished speaking, then dynamically sets a timeout based on this probability. For example, if user audio trails off with "uhhm", the model will score a low probability of turn end and wait longer for the user to continue speaking. This can be useful for more natural conversations, but may have a higher latency. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeSessionCreateResponseGAAudioInput( + global::tryAGI.OpenAI.RealtimeAudioFormats? format, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction? noiseReduction, + global::tryAGI.OpenAI.AudioTranscription? transcription, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection? turnDetection) + { + this.Format = format; + this.NoiseReduction = noiseReduction; + this.Transcription = transcription; + this.TurnDetection = turnDetection; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeSessionCreateResponseGAAudioInput() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputNoiseReduction.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputNoiseReduction.Json.g.cs new file mode 100644 index 00000000..71dff0c4 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputNoiseReduction.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseGAAudioInputNoiseReduction + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction? 
FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputNoiseReduction; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputNoiseReduction.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputNoiseReduction.g.cs new file mode 100644 index 00000000..8247cc31 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputNoiseReduction.g.cs @@ -0,0 +1,48 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Configuration for input audio noise reduction. 
This can be set to `null` to turn off.
+ /// Noise reduction filters audio added to the input audio buffer before it is sent to VAD and the model.
+ /// Filtering the audio can improve VAD and turn detection accuracy (reducing false positives) and model performance by improving perception of the input audio. + ///
+ public sealed partial class RealtimeSessionCreateResponseGAAudioInputNoiseReduction + { + /// + /// Type of noise reduction. `near_field` is for close-talking microphones such as headphones, `far_field` is for far-field microphones such as laptop or conference room microphones. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("type")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.NoiseReductionTypeJsonConverter))] + public global::tryAGI.OpenAI.NoiseReductionType? Type { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// Type of noise reduction. `near_field` is for close-talking microphones such as headphones, `far_field` is for far-field microphones such as laptop or conference room microphones. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeSessionCreateResponseGAAudioInputNoiseReduction( + global::tryAGI.OpenAI.NoiseReductionType? type) + { + this.Type = type; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeSessionCreateResponseGAAudioInputNoiseReduction() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetection.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetection.Json.g.cs new file mode 100644 index 00000000..32db292d --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetection.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseGAAudioInputTurnDetection + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection? 
FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetection; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetection.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetection.g.cs new file mode 100644 index 00000000..b6d43034 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetection.g.cs @@ -0,0 +1,148 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Configuration for turn detection, ether Server VAD or Semantic VAD. 
This can be set to `null` to turn off, in which case the client must manually trigger model response.
+ /// Server VAD means that the model will detect the start and end of speech based on audio volume and respond at the end of user speech.
+ /// Semantic VAD is more advanced and uses a turn detection model (in conjunction with VAD) to semantically estimate whether the user has finished speaking, then dynamically sets a timeout based on this probability. For example, if user audio trails off with "uhhm", the model will score a low probability of turn end and wait longer for the user to continue speaking. This can be useful for more natural conversations, but may have a higher latency. + ///
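As a usage sketch, here is how a client might branch on the detection mode reported back on a deserialized session; every type, property and enum member used below appears elsewhere in this diff, and `session` is assumed to be an already-parsed RealtimeSessionCreateResponseGA.

    using System;
    using tryAGI.OpenAI;

    internal static class TurnDetectionInfo
    {
        public static void Describe(RealtimeSessionCreateResponseGA session)
        {
            var vad = session.Audio?.Input?.TurnDetection;
            if (vad is null)
            {
                Console.WriteLine("Turn detection is off; the client must trigger responses manually.");
            }
            else if (vad.Type == RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.SemanticVad)
            {
                var eagerness = vad.Eagerness
                    ?? RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.Auto;
                Console.WriteLine($"Semantic VAD, eagerness '{eagerness.ToValueString()}'.");
            }
            else
            {
                // server_vad is the documented default mode.
                Console.WriteLine($"Server VAD, threshold {vad.Threshold ?? 0.5}, silence {vad.SilenceDurationMs ?? 500} ms.");
            }
        }
    }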
+ public sealed partial class RealtimeSessionCreateResponseGAAudioInputTurnDetection + { + /// + /// Whether or not to automatically generate a response when a VAD stop event occurs.
+ /// Default Value: true + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("create_response")] + public bool? CreateResponse { get; set; } + + /// + /// Used only for `semantic_vad` mode. The eagerness of the model to respond. `low` will wait longer for the user to continue speaking, `high` will respond more quickly. `auto` is the default and is equivalent to `medium`. `low`, `medium`, and `high` have max timeouts of 8s, 4s, and 2s respectively.
+ /// Default Value: auto + ///
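Putting the semantic_vad knobs together, a short sketch of a turn-detection block that waits longer for slow speakers; all property and enum names below are defined in this diff, and the concrete values are illustrative.

    using tryAGI.OpenAI;

    var semanticVad = new RealtimeSessionCreateResponseGAAudioInputTurnDetection
    {
        Type = RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.SemanticVad,
        Eagerness = RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.Low, // wait longer before replying
        CreateResponse = true,    // respond automatically at the end of the user's turn (default)
        InterruptResponse = true, // barge-in cancels the in-flight response (default)
        IdleTimeoutMs = 10_000,   // optional: emit timeout_triggered after 10 s without audio
    };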
+ [global::System.Text.Json.Serialization.JsonPropertyName("eagerness")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessJsonConverter))] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness? Eagerness { get; set; } + + /// + /// Optional idle timeout after which turn detection will auto-timeout when
+ /// no additional audio is received and emits a `timeout_triggered` event. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("idle_timeout_ms")] + public int? IdleTimeoutMs { get; set; } + + /// + /// Whether or not to automatically interrupt any ongoing response with output to the default
+ /// conversation (i.e. `conversation` of `auto`) when a VAD start event occurs.
+ /// Default Value: true + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("interrupt_response")] + public bool? InterruptResponse { get; set; } + + /// + /// Used only for `server_vad` mode. Amount of audio to include before the VAD detected speech (in
+ /// milliseconds). Defaults to 300ms. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("prefix_padding_ms")] + public int? PrefixPaddingMs { get; set; } + + /// + /// Used only for `server_vad` mode. Duration of silence to detect speech stop (in milliseconds). Defaults
+ /// to 500ms. With shorter values the model will respond more quickly,
+ /// but may jump in on short pauses from the user. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("silence_duration_ms")] + public int? SilenceDurationMs { get; set; } + + /// + /// Used only for `server_vad` mode. Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A
+ /// higher threshold will require louder audio to activate the model, and
+ /// thus might perform better in noisy environments. + ///
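The three server_vad knobs documented above combine as follows; values that differ from the documented defaults are purely illustrative.

    using tryAGI.OpenAI;

    var serverVad = new RealtimeSessionCreateResponseGAAudioInputTurnDetection
    {
        Type = RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.ServerVad,
        Threshold = 0.7,         // require louder speech than the 0.5 default, e.g. for noisy rooms
        PrefixPaddingMs = 300,   // audio retained from before speech was detected (the default)
        SilenceDurationMs = 800, // wait longer than the 500 ms default before closing the turn
    };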
+ [global::System.Text.Json.Serialization.JsonPropertyName("threshold")] + public double? Threshold { get; set; } + + /// + /// Type of turn detection.
+ /// Default Value: server_vad + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("type")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeJsonConverter))] + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType? Type { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// Whether or not to automatically generate a response when a VAD stop event occurs.
+ /// Default Value: true + /// + /// + /// Used only for `semantic_vad` mode. The eagerness of the model to respond. `low` will wait longer for the user to continue speaking, `high` will respond more quickly. `auto` is the default and is equivalent to `medium`. `low`, `medium`, and `high` have max timeouts of 8s, 4s, and 2s respectively.
+ /// Default Value: auto + /// + /// + /// Optional idle timeout after which turn detection will auto-timeout when
+ /// no additional audio is received and emits a `timeout_triggered` event. + /// + /// + /// Whether or not to automatically interrupt any ongoing response with output to the default
+ /// conversation (i.e. `conversation` of `auto`) when a VAD start event occurs.
+ /// Default Value: true + /// + /// + /// Used only for `server_vad` mode. Amount of audio to include before the VAD detected speech (in
+ /// milliseconds). Defaults to 300ms. + /// + /// + /// Used only for `server_vad` mode. Duration of silence to detect speech stop (in milliseconds). Defaults
+ /// to 500ms. With shorter values the model will respond more quickly,
+ /// but may jump in on short pauses from the user. + /// + /// + /// Used only for `server_vad` mode. Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A
+ /// higher threshold will require louder audio to activate the model, and
+ /// thus might perform better in noisy environments. + /// + /// + /// Type of turn detection.
+ /// Default Value: server_vad + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeSessionCreateResponseGAAudioInputTurnDetection( + bool? createResponse, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness? eagerness, + int? idleTimeoutMs, + bool? interruptResponse, + int? prefixPaddingMs, + int? silenceDurationMs, + double? threshold, + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType? type) + { + this.CreateResponse = createResponse; + this.Eagerness = eagerness; + this.IdleTimeoutMs = idleTimeoutMs; + this.InterruptResponse = interruptResponse; + this.PrefixPaddingMs = prefixPaddingMs; + this.SilenceDurationMs = silenceDurationMs; + this.Threshold = threshold; + this.Type = type; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeSessionCreateResponseGAAudioInputTurnDetection() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.g.cs similarity index 55% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.g.cs index ce1cc9c4..6a25a52a 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.g.cs @@ -7,7 +7,7 @@ namespace tryAGI.OpenAI /// Used only for `semantic_vad` mode. The eagerness of the model to respond. `low` will wait longer for the user to continue speaking, `high` will respond more quickly. `auto` is the default and is equivalent to `medium`. `low`, `medium`, and `high` have max timeouts of 8s, 4s, and 2s respectively.
/// Default Value: auto ///
- public enum RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness + public enum RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness { /// /// @@ -30,33 +30,33 @@ public enum RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness /// /// Enum extensions to do fast conversions without the reflection. /// - public static class RealtimeSessionCreateResponseAudioInputTurnDetectionEagernessExtensions + public static class RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessExtensions { /// /// Converts an enum to a string. /// - public static string ToValueString(this RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness value) + public static string ToValueString(this RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness value) { return value switch { - RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.Low => "low", - RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.Medium => "medium", - RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.High => "high", - RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.Auto => "auto", + RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.Low => "low", + RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.Medium => "medium", + RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.High => "high", + RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.Auto => "auto", _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), }; } /// /// Converts an string to a enum. /// - public static RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness? ToEnum(string value) + public static RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness? ToEnum(string value) { return value switch { - "low" => RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.Low, - "medium" => RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.Medium, - "high" => RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.High, - "auto" => RealtimeSessionCreateResponseAudioInputTurnDetectionEagerness.Auto, + "low" => RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.Low, + "medium" => RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.Medium, + "high" => RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.High, + "auto" => RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.Auto, _ => null, }; } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetectionType.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.g.cs similarity index 56% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetectionType.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.g.cs index aeb70c6e..c00ab2ce 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseAudioInputTurnDetectionType.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.g.cs @@ -7,7 +7,7 @@ namespace tryAGI.OpenAI /// Type of turn detection.
/// Default Value: server_vad ///
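The renamed enums keep the generated fast-conversion extensions, so wire values can be mapped without reflection; a brief sketch using the eagerness enum defined just above.

    using tryAGI.OpenAI;

    // Enum -> wire value
    string wire = RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness.High.ToValueString(); // "high"

    // Wire value -> enum (null when the string is not a known value)
    RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagerness? parsed =
        RealtimeSessionCreateResponseGAAudioInputTurnDetectionEagernessExtensions.ToEnum("medium");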
- public enum RealtimeSessionCreateResponseAudioInputTurnDetectionType + public enum RealtimeSessionCreateResponseGAAudioInputTurnDetectionType { /// /// @@ -22,29 +22,29 @@ public enum RealtimeSessionCreateResponseAudioInputTurnDetectionType /// /// Enum extensions to do fast conversions without the reflection. /// - public static class RealtimeSessionCreateResponseAudioInputTurnDetectionTypeExtensions + public static class RealtimeSessionCreateResponseGAAudioInputTurnDetectionTypeExtensions { /// /// Converts an enum to a string. /// - public static string ToValueString(this RealtimeSessionCreateResponseAudioInputTurnDetectionType value) + public static string ToValueString(this RealtimeSessionCreateResponseGAAudioInputTurnDetectionType value) { return value switch { - RealtimeSessionCreateResponseAudioInputTurnDetectionType.ServerVad => "server_vad", - RealtimeSessionCreateResponseAudioInputTurnDetectionType.SemanticVad => "semantic_vad", + RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.ServerVad => "server_vad", + RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.SemanticVad => "semantic_vad", _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), }; } /// /// Converts an string to a enum. /// - public static RealtimeSessionCreateResponseAudioInputTurnDetectionType? ToEnum(string value) + public static RealtimeSessionCreateResponseGAAudioInputTurnDetectionType? ToEnum(string value) { return value switch { - "server_vad" => RealtimeSessionCreateResponseAudioInputTurnDetectionType.ServerVad, - "semantic_vad" => RealtimeSessionCreateResponseAudioInputTurnDetectionType.SemanticVad, + "server_vad" => RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.ServerVad, + "semantic_vad" => RealtimeSessionCreateResponseGAAudioInputTurnDetectionType.SemanticVad, _ => null, }; } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioOutput.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioOutput.Json.g.cs new file mode 100644 index 00000000..92bd5e81 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioOutput.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseGAAudioOutput + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? 
jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput? FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAAudioOutput; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? 
jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioOutput.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioOutput.g.cs new file mode 100644 index 00000000..26723ac9 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAAudioOutput.g.cs @@ -0,0 +1,76 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// + /// + public sealed partial class RealtimeSessionCreateResponseGAAudioOutput + { + /// + /// + /// + [global::System.Text.Json.Serialization.JsonPropertyName("format")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeAudioFormatsJsonConverter))] + public global::tryAGI.OpenAI.RealtimeAudioFormats? Format { get; set; } + + /// + /// The speed of the model's spoken response as a multiple of the original speed.
+ /// 1.0 is the default speed. 0.25 is the minimum speed. 1.5 is the maximum speed. This value can only be changed in between model turns, not while a response is in progress.
+ /// This parameter is a post-processing adjustment to the audio after it is generated; it's
+ /// also possible to prompt the model to speak faster or slower.
+ /// Default Value: 1 + ///
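// --- Illustrative usage sketch (not part of the generated diff) -----------------------------
// A minimal, hedged example of how the `speed` setting described above might be consumed via
// the generated RealtimeSessionCreateResponseGAAudioOutput type and its reflection-based
// ToJson/FromJson helpers added earlier in this diff. The 1.25 value is only an illustration
// of the documented range of 0.25 to 1.5 (default 1).
using tryAGI.OpenAI;

var audioOutput = new RealtimeSessionCreateResponseGAAudioOutput
{
    Speed = 1.25, // post-processing playback speed; can only change between model turns
};

string audioOutputJson = audioOutput.ToJson();   // reflection-based overload with default options
var parsedAudioOutput = RealtimeSessionCreateResponseGAAudioOutput.FromJson(audioOutputJson);
// ---------------------------------------------------------------------------------------------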
+ [global::System.Text.Json.Serialization.JsonPropertyName("speed")] + public double? Speed { get; set; } + + /// + /// Example: ash + /// + /// ash + [global::System.Text.Json.Serialization.JsonPropertyName("voice")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.VoiceIdsSharedJsonConverter))] + public global::tryAGI.OpenAI.VoiceIdsShared? Voice { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// + /// The speed of the model's spoken response as a multiple of the original speed.
+ /// 1.0 is the default speed. 0.25 is the minimum speed. 1.5 is the maximum speed. This value can only be changed in between model turns, not while a response is in progress.
+ /// This parameter is a post-processing adjustment to the audio after it is generated; it's
+ /// also possible to prompt the model to speak faster or slower.
+ /// Default Value: 1 + /// + /// + /// Example: ash + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeSessionCreateResponseGAAudioOutput( + global::tryAGI.OpenAI.RealtimeAudioFormats? format, + double? speed, + global::tryAGI.OpenAI.VoiceIdsShared? voice) + { + this.Format = format; + this.Speed = speed; + this.Voice = voice; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeSessionCreateResponseGAAudioOutput() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAClientSecret.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAClientSecret.Json.g.cs new file mode 100644 index 00000000..ad4dfe81 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAClientSecret.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseGAClientSecret + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret? FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. 
Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGAClientSecret; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseClientSecret.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAClientSecret.g.cs similarity index 88% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseClientSecret.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAClientSecret.g.cs index 9c29f228..ac6a0647 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseClientSecret.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAClientSecret.g.cs @@ -6,7 +6,7 @@ namespace tryAGI.OpenAI /// /// Ephemeral key returned by the API. /// - public sealed partial class RealtimeSessionCreateResponseClientSecret + public sealed partial class RealtimeSessionCreateResponseGAClientSecret { /// /// Timestamp for when the token expires. Currently, all tokens expire
@@ -30,7 +30,7 @@ public sealed partial class RealtimeSessionCreateResponseClientSecret public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// /// Timestamp for when the token expires. Currently, all tokens expire
@@ -42,7 +42,7 @@ public sealed partial class RealtimeSessionCreateResponseClientSecret #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif - public RealtimeSessionCreateResponseClientSecret( + public RealtimeSessionCreateResponseGAClientSecret( int expiresAt, string value) { @@ -51,9 +51,9 @@ public RealtimeSessionCreateResponseClientSecret( } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - public RealtimeSessionCreateResponseClientSecret() + public RealtimeSessionCreateResponseGAClientSecret() { } } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAIncludeItem.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAIncludeItem.g.cs new file mode 100644 index 00000000..9444ba63 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAIncludeItem.g.cs @@ -0,0 +1,45 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// + /// + public enum RealtimeSessionCreateResponseGAIncludeItem + { + /// + /// + /// + ItemInputAudioTranscriptionLogprobs, + } + + /// + /// Enum extensions to do fast conversions without the reflection. + /// + public static class RealtimeSessionCreateResponseGAIncludeItemExtensions + { + /// + /// Converts an enum to a string. + /// + public static string ToValueString(this RealtimeSessionCreateResponseGAIncludeItem value) + { + return value switch + { + RealtimeSessionCreateResponseGAIncludeItem.ItemInputAudioTranscriptionLogprobs => "item.input_audio_transcription.logprobs", + _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), + }; + } + /// + /// Converts an string to a enum. + /// + public static RealtimeSessionCreateResponseGAIncludeItem? ToEnum(string value) + { + return value switch + { + "item.input_audio_transcription.logprobs" => RealtimeSessionCreateResponseGAIncludeItem.ItemInputAudioTranscriptionLogprobs, + _ => null, + }; + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAMaxOutputTokens.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAMaxOutputTokens.g.cs new file mode 100644 index 00000000..ee96a32e --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAMaxOutputTokens.g.cs @@ -0,0 +1,45 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// + /// + public enum RealtimeSessionCreateResponseGAMaxOutputTokens + { + /// + /// + /// + Inf, + } + + /// + /// Enum extensions to do fast conversions without the reflection. + /// + public static class RealtimeSessionCreateResponseGAMaxOutputTokensExtensions + { + /// + /// Converts an enum to a string. + /// + public static string ToValueString(this RealtimeSessionCreateResponseGAMaxOutputTokens value) + { + return value switch + { + RealtimeSessionCreateResponseGAMaxOutputTokens.Inf => "inf", + _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), + }; + } + /// + /// Converts an string to a enum. + /// + public static RealtimeSessionCreateResponseGAMaxOutputTokens? 
ToEnum(string value) + { + return value switch + { + "inf" => RealtimeSessionCreateResponseGAMaxOutputTokens.Inf, + _ => null, + }; + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseModel.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAModel.g.cs similarity index 54% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseModel.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAModel.g.cs index f70ee88a..821b9d51 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseModel.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAModel.g.cs @@ -6,7 +6,7 @@ namespace tryAGI.OpenAI /// /// /// - public enum RealtimeSessionCreateResponseModel + public enum RealtimeSessionCreateResponseGAModel { /// /// @@ -45,41 +45,41 @@ public enum RealtimeSessionCreateResponseModel /// /// Enum extensions to do fast conversions without the reflection. /// - public static class RealtimeSessionCreateResponseModelExtensions + public static class RealtimeSessionCreateResponseGAModelExtensions { /// /// Converts an enum to a string. /// - public static string ToValueString(this RealtimeSessionCreateResponseModel value) + public static string ToValueString(this RealtimeSessionCreateResponseGAModel value) { return value switch { - RealtimeSessionCreateResponseModel.GptRealtime => "gpt-realtime", - RealtimeSessionCreateResponseModel.GptRealtime20250828 => "gpt-realtime-2025-08-28", - RealtimeSessionCreateResponseModel.Gpt4oRealtimePreview => "gpt-4o-realtime-preview", - RealtimeSessionCreateResponseModel.Gpt4oRealtimePreview20241001 => "gpt-4o-realtime-preview-2024-10-01", - RealtimeSessionCreateResponseModel.Gpt4oRealtimePreview20241217 => "gpt-4o-realtime-preview-2024-12-17", - RealtimeSessionCreateResponseModel.Gpt4oRealtimePreview20250603 => "gpt-4o-realtime-preview-2025-06-03", - RealtimeSessionCreateResponseModel.Gpt4oMiniRealtimePreview => "gpt-4o-mini-realtime-preview", - RealtimeSessionCreateResponseModel.Gpt4oMiniRealtimePreview20241217 => "gpt-4o-mini-realtime-preview-2024-12-17", + RealtimeSessionCreateResponseGAModel.GptRealtime => "gpt-realtime", + RealtimeSessionCreateResponseGAModel.GptRealtime20250828 => "gpt-realtime-2025-08-28", + RealtimeSessionCreateResponseGAModel.Gpt4oRealtimePreview => "gpt-4o-realtime-preview", + RealtimeSessionCreateResponseGAModel.Gpt4oRealtimePreview20241001 => "gpt-4o-realtime-preview-2024-10-01", + RealtimeSessionCreateResponseGAModel.Gpt4oRealtimePreview20241217 => "gpt-4o-realtime-preview-2024-12-17", + RealtimeSessionCreateResponseGAModel.Gpt4oRealtimePreview20250603 => "gpt-4o-realtime-preview-2025-06-03", + RealtimeSessionCreateResponseGAModel.Gpt4oMiniRealtimePreview => "gpt-4o-mini-realtime-preview", + RealtimeSessionCreateResponseGAModel.Gpt4oMiniRealtimePreview20241217 => "gpt-4o-mini-realtime-preview-2024-12-17", _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), }; } /// /// Converts an string to a enum. /// - public static RealtimeSessionCreateResponseModel? ToEnum(string value) + public static RealtimeSessionCreateResponseGAModel? 
ToEnum(string value) { return value switch { - "gpt-realtime" => RealtimeSessionCreateResponseModel.GptRealtime, - "gpt-realtime-2025-08-28" => RealtimeSessionCreateResponseModel.GptRealtime20250828, - "gpt-4o-realtime-preview" => RealtimeSessionCreateResponseModel.Gpt4oRealtimePreview, - "gpt-4o-realtime-preview-2024-10-01" => RealtimeSessionCreateResponseModel.Gpt4oRealtimePreview20241001, - "gpt-4o-realtime-preview-2024-12-17" => RealtimeSessionCreateResponseModel.Gpt4oRealtimePreview20241217, - "gpt-4o-realtime-preview-2025-06-03" => RealtimeSessionCreateResponseModel.Gpt4oRealtimePreview20250603, - "gpt-4o-mini-realtime-preview" => RealtimeSessionCreateResponseModel.Gpt4oMiniRealtimePreview, - "gpt-4o-mini-realtime-preview-2024-12-17" => RealtimeSessionCreateResponseModel.Gpt4oMiniRealtimePreview20241217, + "gpt-realtime" => RealtimeSessionCreateResponseGAModel.GptRealtime, + "gpt-realtime-2025-08-28" => RealtimeSessionCreateResponseGAModel.GptRealtime20250828, + "gpt-4o-realtime-preview" => RealtimeSessionCreateResponseGAModel.Gpt4oRealtimePreview, + "gpt-4o-realtime-preview-2024-10-01" => RealtimeSessionCreateResponseGAModel.Gpt4oRealtimePreview20241001, + "gpt-4o-realtime-preview-2024-12-17" => RealtimeSessionCreateResponseGAModel.Gpt4oRealtimePreview20241217, + "gpt-4o-realtime-preview-2025-06-03" => RealtimeSessionCreateResponseGAModel.Gpt4oRealtimePreview20250603, + "gpt-4o-mini-realtime-preview" => RealtimeSessionCreateResponseGAModel.Gpt4oMiniRealtimePreview, + "gpt-4o-mini-realtime-preview-2024-12-17" => RealtimeSessionCreateResponseGAModel.Gpt4oMiniRealtimePreview20241217, _ => null, }; } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAOutputModalitie.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAOutputModalitie.g.cs new file mode 100644 index 00000000..f8afb94b --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAOutputModalitie.g.cs @@ -0,0 +1,51 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// + /// + public enum RealtimeSessionCreateResponseGAOutputModalitie + { + /// + /// + /// + Text, + /// + /// + /// + Audio, + } + + /// + /// Enum extensions to do fast conversions without the reflection. + /// + public static class RealtimeSessionCreateResponseGAOutputModalitieExtensions + { + /// + /// Converts an enum to a string. + /// + public static string ToValueString(this RealtimeSessionCreateResponseGAOutputModalitie value) + { + return value switch + { + RealtimeSessionCreateResponseGAOutputModalitie.Text => "text", + RealtimeSessionCreateResponseGAOutputModalitie.Audio => "audio", + _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), + }; + } + /// + /// Converts an string to a enum. + /// + public static RealtimeSessionCreateResponseGAOutputModalitie? 
ToEnum(string value) + { + return value switch + { + "text" => RealtimeSessionCreateResponseGAOutputModalitie.Text, + "audio" => RealtimeSessionCreateResponseGAOutputModalitie.Audio, + _ => null, + }; + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum.g.cs new file mode 100644 index 00000000..10a63769 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum.g.cs @@ -0,0 +1,46 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Default tracing mode for the session.
+ /// Default Value: auto + ///
+ public enum RealtimeSessionCreateResponseGATracingEnum + { + /// + /// + /// + Auto, + } + + /// + /// Enum extensions to do fast conversions without the reflection. + /// + public static class RealtimeSessionCreateResponseGATracingEnumExtensions + { + /// + /// Converts an enum to a string. + /// + public static string ToValueString(this RealtimeSessionCreateResponseGATracingEnum value) + { + return value switch + { + RealtimeSessionCreateResponseGATracingEnum.Auto => "auto", + _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), + }; + } + /// + /// Converts an string to a enum. + /// + public static RealtimeSessionCreateResponseGATracingEnum? ToEnum(string value) + { + return value switch + { + "auto" => RealtimeSessionCreateResponseGATracingEnum.Auto, + _ => null, + }; + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum2.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum2.Json.g.cs new file mode 100644 index 00000000..944a0047 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum2.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseGATracingEnum2 + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum2? FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum2), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum2; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. 
Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum2? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum2), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnum2; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum2.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum2.g.cs new file mode 100644 index 00000000..2287604a --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnum2.g.cs @@ -0,0 +1,73 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Granular configuration for tracing. + /// + public sealed partial class RealtimeSessionCreateResponseGATracingEnum2 + { + /// + /// The group id to attach to this trace to enable filtering and
+ /// grouping in the Traces Dashboard. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("group_id")] + public string? GroupId { get; set; } + + /// + /// The arbitrary metadata to attach to this trace to enable
+ /// filtering in the Traces Dashboard. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("metadata")] + public object? Metadata { get; set; } + + /// + /// The name of the workflow to attach to this trace. This is used to
+ /// name the trace in the Traces Dashboard. + ///
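// --- Illustrative usage sketch (not part of the generated diff) -----------------------------
// A hedged sketch of configuring the granular tracing object declared in this hunk. It assumes
// the (groupId, metadata, workflowName) constructor and the ToJson helper defined further down
// in this file; the string values are purely illustrative.
using tryAGI.OpenAI;

var tracing = new RealtimeSessionCreateResponseGATracingEnum2(
    groupId: "group-1234",                     // filtering/grouping key in the Traces Dashboard
    metadata: new { environment = "staging" }, // arbitrary metadata attached to the trace
    workflowName: "realtime-support-call");    // display name of the trace
string tracingJson = tracing.ToJson();
// ---------------------------------------------------------------------------------------------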
+ [global::System.Text.Json.Serialization.JsonPropertyName("workflow_name")] + public string? WorkflowName { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// The group id to attach to this trace to enable filtering and
+ /// grouping in the Traces Dashboard. + /// + /// + /// The arbitrary metadata to attach to this trace to enable
+ /// filtering in the Traces Dashboard. + /// + /// + /// The name of the workflow to attach to this trace. This is used to
+ /// name the trace in the Traces Dashboard. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeSessionCreateResponseGATracingEnum2( + string? groupId, + object? metadata, + string? workflowName) + { + this.GroupId = groupId; + this.Metadata = metadata; + this.WorkflowName = workflowName; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeSessionCreateResponseGATracingEnum2() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnumMetadata.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnumMetadata.Json.g.cs new file mode 100644 index 00000000..ae7f8e17 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnumMetadata.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseGATracingEnumMetadata + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumMetadata? FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumMetadata), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumMetadata; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. 
Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumMetadata? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumMetadata), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseGATracingEnumMetadata; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnumMetadata.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnumMetadata.g.cs new file mode 100644 index 00000000..8bce9ad0 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGATracingEnumMetadata.g.cs @@ -0,0 +1,19 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// The arbitrary metadata to attach to this trace to enable
+ /// filtering in the Traces Dashboard. + ///
+ public sealed partial class RealtimeSessionCreateResponseGATracingEnumMetadata + { + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseType.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAType.g.cs similarity index 70% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseType.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAType.g.cs index 099f2360..fdca136b 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseType.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseGAType.g.cs @@ -6,7 +6,7 @@ namespace tryAGI.OpenAI /// /// The type of session to create. Always `realtime` for the Realtime API. /// - public enum RealtimeSessionCreateResponseType + public enum RealtimeSessionCreateResponseGAType { /// /// @@ -17,27 +17,27 @@ public enum RealtimeSessionCreateResponseType /// /// Enum extensions to do fast conversions without the reflection. /// - public static class RealtimeSessionCreateResponseTypeExtensions + public static class RealtimeSessionCreateResponseGATypeExtensions { /// /// Converts an enum to a string. /// - public static string ToValueString(this RealtimeSessionCreateResponseType value) + public static string ToValueString(this RealtimeSessionCreateResponseGAType value) { return value switch { - RealtimeSessionCreateResponseType.Realtime => "realtime", + RealtimeSessionCreateResponseGAType.Realtime => "realtime", _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), }; } /// /// Converts an string to a enum. /// - public static RealtimeSessionCreateResponseType? ToEnum(string value) + public static RealtimeSessionCreateResponseGAType? ToEnum(string value) { return value switch { - "realtime" => RealtimeSessionCreateResponseType.Realtime, + "realtime" => RealtimeSessionCreateResponseGAType.Realtime, _ => null, }; } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseOutputModalities.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseOutputModalities.Json.g.cs new file mode 100644 index 00000000..f1c1e37d --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseOutputModalities.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseOutputModalities + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. 
+ /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseOutputModalities? FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseOutputModalities), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseOutputModalities; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseOutputModalities? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseOutputModalities), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseOutputModalities; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. 
Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseOutputModalities.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseOutputModalities.g.cs new file mode 100644 index 00000000..7d546538 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseOutputModalities.g.cs @@ -0,0 +1,19 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// The set of modalities the model can respond with. To disable audio,
+ /// set this to ["text"]. + ///
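// --- Illustrative usage sketch (not part of the generated diff) -----------------------------
// The modalities described above map onto the RealtimeSessionCreateResponseGAOutputModalitie
// enum and its extension helpers added earlier in this diff; a hedged sketch of the
// reflection-free string round-trip looks like this.
using tryAGI.OpenAI;

string textWire = RealtimeSessionCreateResponseGAOutputModalitie.Text.ToValueString(); // "text"
RealtimeSessionCreateResponseGAOutputModalitie? parsedModality =
    RealtimeSessionCreateResponseGAOutputModalitieExtensions.ToEnum("audio");          // Audio
RealtimeSessionCreateResponseGAOutputModalitie? unknownModality =
    RealtimeSessionCreateResponseGAOutputModalitieExtensions.ToEnum("video");          // null, not an exception
// ---------------------------------------------------------------------------------------------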
+ public sealed partial class RealtimeSessionCreateResponseOutputModalities + { + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTracingEnum2.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTracingEnum2.g.cs index 213d44ab..56f71230 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTracingEnum2.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTracingEnum2.g.cs @@ -10,21 +10,21 @@ public sealed partial class RealtimeSessionCreateResponseTracingEnum2 { /// /// The group id to attach to this trace to enable filtering and
- /// grouping in the Traces Dashboard. + /// grouping in the traces dashboard. ///
[global::System.Text.Json.Serialization.JsonPropertyName("group_id")] public string? GroupId { get; set; } /// /// The arbitrary metadata to attach to this trace to enable
- /// filtering in the Traces Dashboard. + /// filtering in the traces dashboard. ///
[global::System.Text.Json.Serialization.JsonPropertyName("metadata")] public object? Metadata { get; set; } /// /// The name of the workflow to attach to this trace. This is used to
- /// name the trace in the Traces Dashboard. + /// name the trace in the traces dashboard. ///
[global::System.Text.Json.Serialization.JsonPropertyName("workflow_name")] public string? WorkflowName { get; set; } @@ -40,15 +40,15 @@ public sealed partial class RealtimeSessionCreateResponseTracingEnum2 ///
/// /// The group id to attach to this trace to enable filtering and
- /// grouping in the Traces Dashboard. + /// grouping in the traces dashboard. /// /// /// The arbitrary metadata to attach to this trace to enable
- /// filtering in the Traces Dashboard. + /// filtering in the traces dashboard. /// /// /// The name of the workflow to attach to this trace. This is used to
- /// name the trace in the Traces Dashboard. + /// name the trace in the traces dashboard. /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTracingEnumMetadata.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTracingEnumMetadata.g.cs index d405de18..b45d80a8 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTracingEnumMetadata.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTracingEnumMetadata.g.cs @@ -5,7 +5,7 @@ namespace tryAGI.OpenAI { /// /// The arbitrary metadata to attach to this trace to enable
- /// filtering in the Traces Dashboard. + /// filtering in the traces dashboard. ///
public sealed partial class RealtimeSessionCreateResponseTracingEnumMetadata { diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTurnDetection.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTurnDetection.Json.g.cs new file mode 100644 index 00000000..2dfb1637 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTurnDetection.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeSessionCreateResponseTurnDetection + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseTurnDetection? FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseTurnDetection), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseTurnDetection; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeSessionCreateResponseTurnDetection? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. 
+ /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseTurnDetection), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeSessionCreateResponseTurnDetection; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTurnDetection.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTurnDetection.g.cs new file mode 100644 index 00000000..b251d25c --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeSessionCreateResponseTurnDetection.g.cs @@ -0,0 +1,90 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Configuration for turn detection. Can be set to `null` to turn off. Server
+ /// VAD means that the model will detect the start and end of speech based on
+ /// audio volume and respond at the end of user speech. + ///
+ public sealed partial class RealtimeSessionCreateResponseTurnDetection + { + /// + /// Amount of audio to include before the VAD detected speech (in
+ /// milliseconds). Defaults to 300ms. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("prefix_padding_ms")] + public int? PrefixPaddingMs { get; set; } + + /// + /// Duration of silence to detect speech stop (in milliseconds). Defaults
+ /// to 500ms. With shorter values the model will respond more quickly,
+ /// but may jump in on short pauses from the user. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("silence_duration_ms")] + public int? SilenceDurationMs { get; set; } + + /// + /// Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A
+ /// higher threshold will require louder audio to activate the model, and
+ /// thus might perform better in noisy environments. + ///
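// --- Illustrative usage sketch (not part of the generated diff) -----------------------------
// A hedged sketch of building the server-VAD turn detection object declared in this hunk, using
// the defaults called out in the property summaries above and the constructor defined further
// down in this file.
using tryAGI.OpenAI;

var turnDetection = new RealtimeSessionCreateResponseTurnDetection(
    prefixPaddingMs: 300,    // audio retained before detected speech, in milliseconds
    silenceDurationMs: 500,  // silence that ends the user's turn, in milliseconds
    threshold: 0.5,          // VAD activation threshold between 0.0 and 1.0
    type: "server_vad");     // the only type this model currently supports
string turnDetectionJson = turnDetection.ToJson();
// ---------------------------------------------------------------------------------------------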
+ [global::System.Text.Json.Serialization.JsonPropertyName("threshold")] + public double? Threshold { get; set; } + + /// + /// Type of turn detection, only `server_vad` is currently supported. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("type")] + public string? Type { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// Amount of audio to include before the VAD detected speech (in
+ /// milliseconds). Defaults to 300ms. + /// + /// + /// Duration of silence to detect speech stop (in milliseconds). Defaults
+ /// to 500ms. With shorter values the model will respond more quickly,
+ /// but may jump in on short pauses from the user. + /// + /// + /// Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A
+ /// higher threshold will require louder audio to activate the model, and
+ /// thus might perform better in noisy environments. + /// + /// + /// Type of turn detection, only `server_vad` is currently supported. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeSessionCreateResponseTurnDetection( + int? prefixPaddingMs, + int? silenceDurationMs, + double? threshold, + string? type) + { + this.PrefixPaddingMs = prefixPaddingMs; + this.SilenceDurationMs = silenceDurationMs; + this.Threshold = threshold; + this.Type = type; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeSessionCreateResponseTurnDetection() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGA.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGA.Json.g.cs new file mode 100644 index 00000000..a695ae9c --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGA.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeTranscriptionSessionCreateResponseGA + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA? FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. 
Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGA.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGA.g.cs new file mode 100644 index 00000000..4b634b3c --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGA.g.cs @@ -0,0 +1,105 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// A Realtime transcription session configuration object. + /// + public sealed partial class RealtimeTranscriptionSessionCreateResponseGA + { + /// + /// Configuration for input audio for the session. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("audio")] + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio? Audio { get; set; } + + /// + /// Expiration timestamp for the session, in seconds since epoch. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("expires_at")] + public int? ExpiresAt { get; set; } + + /// + /// Unique identifier for the session that looks like `sess_1234567890abcdef`. 
+ /// + [global::System.Text.Json.Serialization.JsonPropertyName("id")] + [global::System.Text.Json.Serialization.JsonRequired] + public required string Id { get; set; } + + /// + /// Additional fields to include in server outputs.
+ /// - `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription. + ///
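A minimal usage sketch for the RealtimeTranscriptionSessionCreateResponseGA model defined in this file, using the FromJson helper from the accompanying Json.g.cs partial; the payload below is hypothetical and assumes only the documented fields (`id` and `object` are required):

using System;
using tryAGI.OpenAI;

// Hypothetical payload shaped after the documented fields; `id` and `object` are required.
var json = @"{""id"":""sess_1234567890abcdef"",""object"":""realtime.transcription_session"",""type"":""transcription"",""expires_at"":1742188264}";

var session = RealtimeTranscriptionSessionCreateResponseGA.FromJson(json);
Console.WriteLine($"{session?.Id} ({session?.Type}) expires at {session?.ExpiresAt}");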
+ [global::System.Text.Json.Serialization.JsonPropertyName("include")] + public global::System.Collections.Generic.IList? Include { get; set; } + + /// + /// The object type. Always `realtime.transcription_session`. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("object")] + [global::System.Text.Json.Serialization.JsonRequired] + public required string Object { get; set; } + + /// + /// The type of session. Always `transcription` for transcription sessions. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("type")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeTranscriptionSessionCreateResponseGATypeJsonConverter))] + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType Type { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// Configuration for input audio for the session. + /// + /// + /// Expiration timestamp for the session, in seconds since epoch. + /// + /// + /// Unique identifier for the session that looks like `sess_1234567890abcdef`. + /// + /// + /// Additional fields to include in server outputs.
+ /// - `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription. + /// + /// + /// The object type. Always `realtime.transcription_session`. + /// + /// + /// The type of session. Always `transcription` for transcription sessions. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeTranscriptionSessionCreateResponseGA( + string id, + string @object, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio? audio, + int? expiresAt, + global::System.Collections.Generic.IList? include, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAType type) + { + this.Id = id ?? throw new global::System.ArgumentNullException(nameof(id)); + this.Object = @object ?? throw new global::System.ArgumentNullException(nameof(@object)); + this.Audio = audio; + this.ExpiresAt = expiresAt; + this.Include = include; + this.Type = type; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeTranscriptionSessionCreateResponseGA() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudio.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudio.Json.g.cs new file mode 100644 index 00000000..972ecaf0 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudio.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeTranscriptionSessionCreateResponseGAAudio + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio? 
FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudio; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudio.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudio.g.cs new file mode 100644 index 00000000..bd9ba868 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudio.g.cs @@ -0,0 +1,43 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Configuration for input audio for the session. 
+ /// + public sealed partial class RealtimeTranscriptionSessionCreateResponseGAAudio + { + /// + /// + /// + [global::System.Text.Json.Serialization.JsonPropertyName("input")] + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput? Input { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeTranscriptionSessionCreateResponseGAAudio( + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput? input) + { + this.Input = input; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeTranscriptionSessionCreateResponseGAAudio() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInput.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInput.Json.g.cs new file mode 100644 index 00000000..c2d8df82 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInput.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeTranscriptionSessionCreateResponseGAAudioInput + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput? 
FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInput; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? 
jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInput.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInput.g.cs new file mode 100644 index 00000000..2b2033a9 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInput.g.cs @@ -0,0 +1,79 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// + /// + public sealed partial class RealtimeTranscriptionSessionCreateResponseGAAudioInput + { + /// + /// + /// + [global::System.Text.Json.Serialization.JsonPropertyName("format")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.RealtimeAudioFormatsJsonConverter))] + public global::tryAGI.OpenAI.RealtimeAudioFormats? Format { get; set; } + + /// + /// Configuration for input audio noise reduction. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("noise_reduction")] + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction? NoiseReduction { get; set; } + + /// + /// + /// + [global::System.Text.Json.Serialization.JsonPropertyName("transcription")] + public global::tryAGI.OpenAI.AudioTranscription? Transcription { get; set; } + + /// + /// Configuration for turn detection. Can be set to `null` to turn off. Server
+ /// VAD means that the model will detect the start and end of speech based on
+ /// audio volume and respond at the end of user speech. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("turn_detection")] + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection? TurnDetection { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// + /// Configuration for input audio noise reduction. + /// + /// + /// + /// Configuration for turn detection. Can be set to `null` to turn off. Server
+ /// VAD means that the model will detect the start and end of speech based on
+ /// audio volume and respond at the end of user speech. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeTranscriptionSessionCreateResponseGAAudioInput( + global::tryAGI.OpenAI.RealtimeAudioFormats? format, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction? noiseReduction, + global::tryAGI.OpenAI.AudioTranscription? transcription, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection? turnDetection) + { + this.Format = format; + this.NoiseReduction = noiseReduction; + this.Transcription = transcription; + this.TurnDetection = turnDetection; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeTranscriptionSessionCreateResponseGAAudioInput() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction.Json.g.cs new file mode 100644 index 00000000..8c9c3454 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction? FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. 
+ /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction.g.cs new file mode 100644 index 00000000..7e9a0b2f --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction.g.cs @@ -0,0 +1,46 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Configuration for input audio noise reduction. + /// + public sealed partial class RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction + { + /// + /// Type of noise reduction. `near_field` is for close-talking microphones such as headphones, `far_field` is for far-field microphones such as laptop or conference room microphones. 
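A hedged sketch of the noise-reduction choice described above (`near_field` for close-talking microphones, `far_field` for laptop or conference-room microphones); it assumes the generated NoiseReductionType enum exposes NearField and FarField members for those values:

using tryAGI.OpenAI;

// true for a close-talking headset, false for a laptop or conference-room microphone.
var isHeadset = false;

// Assumption: NoiseReductionType.NearField / .FarField map to `near_field` / `far_field`.
var noiseReduction = new RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction(
    type: isHeadset ? NoiseReductionType.NearField : NoiseReductionType.FarField);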
+ /// + [global::System.Text.Json.Serialization.JsonPropertyName("type")] + [global::System.Text.Json.Serialization.JsonConverter(typeof(global::tryAGI.OpenAI.JsonConverters.NoiseReductionTypeJsonConverter))] + public global::tryAGI.OpenAI.NoiseReductionType? Type { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// Type of noise reduction. `near_field` is for close-talking microphones such as headphones, `far_field` is for far-field microphones such as laptop or conference room microphones. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction( + global::tryAGI.OpenAI.NoiseReductionType? type) + { + this.Type = type; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeTranscriptionSessionCreateResponseGAAudioInputNoiseReduction() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection.Json.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection.Json.g.cs new file mode 100644 index 00000000..0f1b6494 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection.Json.g.cs @@ -0,0 +1,92 @@ +#nullable enable + +namespace tryAGI.OpenAI +{ + public sealed partial class RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection + { + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerContext. + /// + public string ToJson( + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + this.GetType(), + jsonSerializerContext); + } + + /// + /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public string ToJson( + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Serialize( + this, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerContext. + /// + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection? 
FromJson( + string json, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection), + jsonSerializerContext) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection; + } + + /// + /// Deserializes a JSON string using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection? FromJson( + string json, + global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.Deserialize( + json, + jsonSerializerOptions); + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerContext. + /// + public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext) + { + return (await global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection), + jsonSerializerContext).ConfigureAwait(false)) as global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection; + } + + /// + /// Deserializes a JSON stream using the provided JsonSerializerOptions. + /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif + public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync( + global::System.IO.Stream jsonStream, + global::System.Text.Json.JsonSerializerOptions? 
jsonSerializerOptions = null) + { + return global::System.Text.Json.JsonSerializer.DeserializeAsync( + jsonStream, + jsonSerializerOptions); + } + } +} diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection.g.cs new file mode 100644 index 00000000..6b773d51 --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection.g.cs @@ -0,0 +1,90 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// Configuration for turn detection. Can be set to `null` to turn off. Server
+ /// VAD means that the model will detect the start and end of speech based on
+ /// audio volume and respond at the end of user speech. + ///
+ public sealed partial class RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection + { + /// + /// Amount of audio to include before the VAD detected speech (in
+ /// milliseconds). Defaults to 300ms. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("prefix_padding_ms")] + public int? PrefixPaddingMs { get; set; } + + /// + /// Duration of silence to detect speech stop (in milliseconds). Defaults
+ /// to 500ms. With shorter values the model will respond more quickly,
+ /// but may jump in on short pauses from the user. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("silence_duration_ms")] + public int? SilenceDurationMs { get; set; } + + /// + /// Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A
+ /// higher threshold will require louder audio to activate the model, and
+ /// thus might perform better in noisy environments. + ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("threshold")] + public double? Threshold { get; set; } + + /// + /// Type of turn detection, only `server_vad` is currently supported. + /// + [global::System.Text.Json.Serialization.JsonPropertyName("type")] + public string? Type { get; set; } + + /// + /// Additional properties that are not explicitly defined in the schema + /// + [global::System.Text.Json.Serialization.JsonExtensionData] + public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary(); + + /// + /// Initializes a new instance of the class. + /// + /// + /// Amount of audio to include before the VAD detected speech (in
+ /// milliseconds). Defaults to 300ms. + /// + /// + /// Duration of silence to detect speech stop (in milliseconds). Defaults
+ /// to 500ms. With shorter values the model will respond more quickly,
+ /// but may jump in on short pauses from the user. + /// + /// + /// Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A
+ /// higher threshold will require louder audio to activate the model, and
+ /// thus might perform better in noisy environments. + /// + /// + /// Type of turn detection, only `server_vad` is currently supported. + /// +#if NET7_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] +#endif + public RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection( + int? prefixPaddingMs, + int? silenceDurationMs, + double? threshold, + string? type) + { + this.PrefixPaddingMs = prefixPaddingMs; + this.SilenceDurationMs = silenceDurationMs; + this.Threshold = threshold; + this.Type = type; + } + + /// + /// Initializes a new instance of the class. + /// + public RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection() + { + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAIncludeItem.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAIncludeItem.g.cs new file mode 100644 index 00000000..8e906d3a --- /dev/null +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAIncludeItem.g.cs @@ -0,0 +1,45 @@ + +#nullable enable + +namespace tryAGI.OpenAI +{ + /// + /// + /// + public enum RealtimeTranscriptionSessionCreateResponseGAIncludeItem + { + /// + /// + /// + ItemInputAudioTranscriptionLogprobs, + } + + /// + /// Enum extensions to do fast conversions without the reflection. + /// + public static class RealtimeTranscriptionSessionCreateResponseGAIncludeItemExtensions + { + /// + /// Converts an enum to a string. + /// + public static string ToValueString(this RealtimeTranscriptionSessionCreateResponseGAIncludeItem value) + { + return value switch + { + RealtimeTranscriptionSessionCreateResponseGAIncludeItem.ItemInputAudioTranscriptionLogprobs => "item.input_audio_transcription.logprobs", + _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), + }; + } + /// + /// Converts an string to a enum. + /// + public static RealtimeTranscriptionSessionCreateResponseGAIncludeItem? ToEnum(string value) + { + return value switch + { + "item.input_audio_transcription.logprobs" => RealtimeTranscriptionSessionCreateResponseGAIncludeItem.ItemInputAudioTranscriptionLogprobs, + _ => null, + }; + } + } +} \ No newline at end of file diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreatedType.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAType.g.cs similarity index 50% rename from src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreatedType.g.cs rename to src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAType.g.cs index 168ec79b..9eb14896 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeServerEventTranscriptionSessionCreatedType.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.RealtimeTranscriptionSessionCreateResponseGAType.g.cs @@ -4,40 +4,40 @@ namespace tryAGI.OpenAI { /// - /// The event type, must be `transcription_session.created`. + /// The type of session. Always `transcription` for transcription sessions. 
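An illustrative sketch of the server-VAD turn-detection settings documented above (prefix padding, silence duration, activation threshold); the constructor and ToJson helper are the generated members shown in this hunk, while the chosen values are only an example:

using System;
using tryAGI.OpenAI;

// Server VAD tuned for a noisier room: higher threshold, longer silence window.
var turnDetection = new RealtimeTranscriptionSessionCreateResponseGAAudioInputTurnDetection(
    prefixPaddingMs: 300,
    silenceDurationMs: 700,
    threshold: 0.7,
    type: "server_vad"); // only `server_vad` is currently supported

// Round-trip through the generated JSON helpers to inspect the wire shape.
Console.WriteLine(turnDetection.ToJson());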
/// - public enum RealtimeServerEventTranscriptionSessionCreatedType + public enum RealtimeTranscriptionSessionCreateResponseGAType { /// /// /// - TranscriptionSessionCreated, + Transcription, } /// /// Enum extensions to do fast conversions without the reflection. /// - public static class RealtimeServerEventTranscriptionSessionCreatedTypeExtensions + public static class RealtimeTranscriptionSessionCreateResponseGATypeExtensions { /// /// Converts an enum to a string. /// - public static string ToValueString(this RealtimeServerEventTranscriptionSessionCreatedType value) + public static string ToValueString(this RealtimeTranscriptionSessionCreateResponseGAType value) { return value switch { - RealtimeServerEventTranscriptionSessionCreatedType.TranscriptionSessionCreated => "transcription_session.created", + RealtimeTranscriptionSessionCreateResponseGAType.Transcription => "transcription", _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null), }; } /// /// Converts an string to a enum. /// - public static RealtimeServerEventTranscriptionSessionCreatedType? ToEnum(string value) + public static RealtimeTranscriptionSessionCreateResponseGAType? ToEnum(string value) { return value switch { - "transcription_session.created" => RealtimeServerEventTranscriptionSessionCreatedType.TranscriptionSessionCreated, + "transcription" => RealtimeTranscriptionSessionCreateResponseGAType.Transcription, _ => null, }; } diff --git a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.Session2.g.cs b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.Session2.g.cs index ad3a7bcd..386e56f4 100644 --- a/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.Session2.g.cs +++ b/src/libs/tryAGI.OpenAI/Generated/tryAGI.OpenAI.Models.Session2.g.cs @@ -14,101 +14,98 @@ namespace tryAGI.OpenAI /// for keys is one minute. ///
#if NET6_0_OR_GREATER - public global::tryAGI.OpenAI.RealtimeSessionCreateResponse? RealtimeSessionCreateResponse { get; init; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA? RealtimeSessionCreateResponseGA { get; init; } #else - public global::tryAGI.OpenAI.RealtimeSessionCreateResponse? RealtimeSessionCreateResponse { get; } + public global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA? RealtimeSessionCreateResponseGA { get; } #endif /// /// /// #if NET6_0_OR_GREATER - [global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(RealtimeSessionCreateResponse))] + [global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(RealtimeSessionCreateResponseGA))] #endif - public bool IsRealtimeSessionCreateResponse => RealtimeSessionCreateResponse != null; + public bool IsRealtimeSessionCreateResponseGA => RealtimeSessionCreateResponseGA != null; /// /// /// - public static implicit operator Session2(global::tryAGI.OpenAI.RealtimeSessionCreateResponse value) => new Session2((global::tryAGI.OpenAI.RealtimeSessionCreateResponse?)value); + public static implicit operator Session2(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA value) => new Session2((global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA?)value); /// /// /// - public static implicit operator global::tryAGI.OpenAI.RealtimeSessionCreateResponse?(Session2 @this) => @this.RealtimeSessionCreateResponse; + public static implicit operator global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA?(Session2 @this) => @this.RealtimeSessionCreateResponseGA; /// /// /// - public Session2(global::tryAGI.OpenAI.RealtimeSessionCreateResponse? value) + public Session2(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA? value) { - RealtimeSessionCreateResponse = value; + RealtimeSessionCreateResponseGA = value; } /// - /// A new Realtime transcription session configuration.
- /// When a session is created on the server via REST API, the session object
- /// also contains an ephemeral key. Default TTL for keys is 10 minutes. This
- /// property is not present when a session is updated via the WebSocket API. + /// A Realtime transcription session configuration object. ///
#if NET6_0_OR_GREATER - public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse? RealtimeTranscriptionSessionCreateResponse { get; init; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA? RealtimeTranscriptionSessionCreateResponseGA { get; init; } #else - public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse? RealtimeTranscriptionSessionCreateResponse { get; } + public global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA? RealtimeTranscriptionSessionCreateResponseGA { get; } #endif /// /// /// #if NET6_0_OR_GREATER - [global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(RealtimeTranscriptionSessionCreateResponse))] + [global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(RealtimeTranscriptionSessionCreateResponseGA))] #endif - public bool IsRealtimeTranscriptionSessionCreateResponse => RealtimeTranscriptionSessionCreateResponse != null; + public bool IsRealtimeTranscriptionSessionCreateResponseGA => RealtimeTranscriptionSessionCreateResponseGA != null; /// /// /// - public static implicit operator Session2(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse value) => new Session2((global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse?)value); + public static implicit operator Session2(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA value) => new Session2((global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA?)value); /// /// /// - public static implicit operator global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse?(Session2 @this) => @this.RealtimeTranscriptionSessionCreateResponse; + public static implicit operator global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA?(Session2 @this) => @this.RealtimeTranscriptionSessionCreateResponseGA; /// /// /// - public Session2(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse? value) + public Session2(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA? value) { - RealtimeTranscriptionSessionCreateResponse = value; + RealtimeTranscriptionSessionCreateResponseGA = value; } /// /// /// public Session2( - global::tryAGI.OpenAI.RealtimeSessionCreateResponse? realtimeSessionCreateResponse, - global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse? realtimeTranscriptionSessionCreateResponse + global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA? realtimeSessionCreateResponseGA, + global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA? realtimeTranscriptionSessionCreateResponseGA ) { - RealtimeSessionCreateResponse = realtimeSessionCreateResponse; - RealtimeTranscriptionSessionCreateResponse = realtimeTranscriptionSessionCreateResponse; + RealtimeSessionCreateResponseGA = realtimeSessionCreateResponseGA; + RealtimeTranscriptionSessionCreateResponseGA = realtimeTranscriptionSessionCreateResponseGA; } /// /// /// public object? Object => - RealtimeTranscriptionSessionCreateResponse as object ?? - RealtimeSessionCreateResponse as object + RealtimeTranscriptionSessionCreateResponseGA as object ?? + RealtimeSessionCreateResponseGA as object ; /// /// /// public override string? ToString() => - RealtimeSessionCreateResponse?.ToString() ?? - RealtimeTranscriptionSessionCreateResponse?.ToString() + RealtimeSessionCreateResponseGA?.ToString() ?? 
+ RealtimeTranscriptionSessionCreateResponseGA?.ToString() ; /// @@ -116,15 +113,15 @@ RealtimeSessionCreateResponse as object /// public bool Validate() { - return IsRealtimeSessionCreateResponse || IsRealtimeTranscriptionSessionCreateResponse; + return IsRealtimeSessionCreateResponseGA || IsRealtimeTranscriptionSessionCreateResponseGA; } /// /// /// public TResult? Match( - global::System.Func? realtimeSessionCreateResponse = null, - global::System.Func? realtimeTranscriptionSessionCreateResponse = null, + global::System.Func? realtimeSessionCreateResponseGA = null, + global::System.Func? realtimeTranscriptionSessionCreateResponseGA = null, bool validate = true) { if (validate) @@ -132,13 +129,13 @@ public bool Validate() Validate(); } - if (IsRealtimeSessionCreateResponse && realtimeSessionCreateResponse != null) + if (IsRealtimeSessionCreateResponseGA && realtimeSessionCreateResponseGA != null) { - return realtimeSessionCreateResponse(RealtimeSessionCreateResponse!); + return realtimeSessionCreateResponseGA(RealtimeSessionCreateResponseGA!); } - else if (IsRealtimeTranscriptionSessionCreateResponse && realtimeTranscriptionSessionCreateResponse != null) + else if (IsRealtimeTranscriptionSessionCreateResponseGA && realtimeTranscriptionSessionCreateResponseGA != null) { - return realtimeTranscriptionSessionCreateResponse(RealtimeTranscriptionSessionCreateResponse!); + return realtimeTranscriptionSessionCreateResponseGA(RealtimeTranscriptionSessionCreateResponseGA!); } return default(TResult); @@ -148,8 +145,8 @@ public bool Validate() /// ///
public void Match( - global::System.Action? realtimeSessionCreateResponse = null, - global::System.Action? realtimeTranscriptionSessionCreateResponse = null, + global::System.Action? realtimeSessionCreateResponseGA = null, + global::System.Action? realtimeTranscriptionSessionCreateResponseGA = null, bool validate = true) { if (validate) @@ -157,13 +154,13 @@ public void Match( Validate(); } - if (IsRealtimeSessionCreateResponse) + if (IsRealtimeSessionCreateResponseGA) { - realtimeSessionCreateResponse?.Invoke(RealtimeSessionCreateResponse!); + realtimeSessionCreateResponseGA?.Invoke(RealtimeSessionCreateResponseGA!); } - else if (IsRealtimeTranscriptionSessionCreateResponse) + else if (IsRealtimeTranscriptionSessionCreateResponseGA) { - realtimeTranscriptionSessionCreateResponse?.Invoke(RealtimeTranscriptionSessionCreateResponse!); + realtimeTranscriptionSessionCreateResponseGA?.Invoke(RealtimeTranscriptionSessionCreateResponseGA!); } } @@ -174,10 +171,10 @@ public override int GetHashCode() { var fields = new object?[] { - RealtimeSessionCreateResponse, - typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponse), - RealtimeTranscriptionSessionCreateResponse, - typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponse), + RealtimeSessionCreateResponseGA, + typeof(global::tryAGI.OpenAI.RealtimeSessionCreateResponseGA), + RealtimeTranscriptionSessionCreateResponseGA, + typeof(global::tryAGI.OpenAI.RealtimeTranscriptionSessionCreateResponseGA), }; const int offset = unchecked((int)2166136261); const int prime = 16777619; @@ -194,8 +191,8 @@ static int HashCodeAggregator(int hashCode, object? value) => value == null public bool Equals(Session2 other) { return - global::System.Collections.Generic.EqualityComparer.Default.Equals(RealtimeSessionCreateResponse, other.RealtimeSessionCreateResponse) && - global::System.Collections.Generic.EqualityComparer.Default.Equals(RealtimeTranscriptionSessionCreateResponse, other.RealtimeTranscriptionSessionCreateResponse) + global::System.Collections.Generic.EqualityComparer.Default.Equals(RealtimeSessionCreateResponseGA, other.RealtimeSessionCreateResponseGA) && + global::System.Collections.Generic.EqualityComparer.Default.Equals(RealtimeTranscriptionSessionCreateResponseGA, other.RealtimeTranscriptionSessionCreateResponseGA) ; } diff --git a/src/libs/tryAGI.OpenAI/openapi.yaml b/src/libs/tryAGI.OpenAI/openapi.yaml index 4f0f723d..faa286bf 100644 --- a/src/libs/tryAGI.OpenAI/openapi.yaml +++ b/src/libs/tryAGI.OpenAI/openapi.yaml @@ -21477,7 +21477,6 @@ components: - $ref: '#/components/schemas/RealtimeClientEventResponseCancel' - $ref: '#/components/schemas/RealtimeClientEventResponseCreate' - $ref: '#/components/schemas/RealtimeClientEventSessionUpdate' - - $ref: '#/components/schemas/RealtimeClientEventTranscriptionSessionUpdate' description: "A realtime client event.\n" discriminator: propertyName: type @@ -21726,7 +21725,7 @@ components: - session.update description: 'The event type, must be `session.update`.' x-stainless-const: true - description: "Send this event to update the session’s configuration.\nThe client may send this event at any time to update any field\nexcept for `voice` and `model`. `voice` can be updated only if there have been no other\naudio outputs yet. \n\nWhen the server receives a `session.update`, it will respond\nwith a `session.updated` event showing the full, effective configuration.\nOnly the fields that are present in the `session.update` are updated. 
To clear a field like\n`instructions`, pass an empty string. To clear a field like `tools`, pass an empty array.\nTo clear a field like `turn_detection`, pass `null`.\n" + description: "Send this event to update the session’s configuration.\nThe client may send this event at any time to update any field\nexcept for `voice` and `model`. `voice` can be updated only if there have been no other audio outputs yet.\n\nWhen the server receives a `session.update`, it will respond\nwith a `session.updated` event showing the full, effective configuration.\nOnly the fields that are present in the `session.update` are updated. To clear a field like\n`instructions`, pass an empty string. To clear a field like `tools`, pass an empty array.\nTo clear a field like `turn_detection`, pass `null`.\n" x-oaiMeta: example: "{\n \"type\": \"session.update\",\n \"session\": {\n \"type\": \"realtime\",\n \"instructions\": \"You are a creative assistant that helps with design tasks.\",\n \"tools\": [\n {\n \"type\": \"function\",\n \"name\": \"display_color_palette\",\n \"description\": \"Call this function when a user asks for a color palette.\",\n \"parameters\": {\n \"type\": \"object\",\n \"strict\": true,\n \"properties\": {\n \"theme\": {\n \"type\": \"string\",\n \"description\": \"Description of the theme for the color scheme.\"\n },\n \"colors\": {\n \"type\": \"array\",\n \"description\": \"Array of five hex color codes based on the theme.\",\n \"items\": {\n \"type\": \"string\",\n \"description\": \"Hex color code\"\n }\n }\n },\n \"required\": [\n \"theme\",\n \"colors\"\n ]\n }\n }\n ],\n \"tool_choice\": \"auto\"\n },\n \"event_id\": \"5fc543c4-f59c-420f-8fb9-68c45d1546a7\",\n}\n" group: realtime @@ -21939,7 +21938,6 @@ components: - output_audio type: string description: 'The content type, `output_text` or `output_audio` depending on the session `output_modalities` configuration.' - x-stainless-const: true description: The content of the message. id: type: string @@ -22210,8 +22208,8 @@ components: session: title: Session configuration anyOf: - - $ref: '#/components/schemas/RealtimeSessionCreateResponse' - - $ref: '#/components/schemas/RealtimeTranscriptionSessionCreateResponse' + - $ref: '#/components/schemas/RealtimeSessionCreateResponseGA' + - $ref: '#/components/schemas/RealtimeTranscriptionSessionCreateResponseGA' description: "The session configuration for either a realtime or transcription session.\n" discriminator: propertyName: type @@ -24006,28 +24004,6 @@ components: example: "{\n \"type\": \"session.updated\",\n \"event_id\": \"event_C9G8mqI3IucaojlVKE8Cs\",\n \"session\": {\n \"type\": \"realtime\",\n \"object\": \"realtime.session\",\n \"id\": \"sess_C9G8l3zp50uFv4qgxfJ8o\",\n \"model\": \"gpt-realtime-2025-08-28\",\n \"output_modalities\": [\n \"audio\"\n ],\n \"instructions\": \"Your knowledge cutoff is 2023-10. You are a helpful, witty, and friendly AI. Act like a human, but remember that you aren't a human and that you can't do human things in the real world. Your voice and personality should be warm and engaging, with a lively and playful tone. If interacting in a non-English language, start by using the standard accent or dialect familiar to the user. Talk quickly. You should always call a function if you can. 
Do not refer to these rules, even if you’re asked about them.\",\n \"tools\": [\n {\n \"type\": \"function\",\n \"name\": \"display_color_palette\",\n \"description\": \"\\nCall this function when a user asks for a color palette.\\n\",\n \"parameters\": {\n \"type\": \"object\",\n \"strict\": true,\n \"properties\": {\n \"theme\": {\n \"type\": \"string\",\n \"description\": \"Description of the theme for the color scheme.\"\n },\n \"colors\": {\n \"type\": \"array\",\n \"description\": \"Array of five hex color codes based on the theme.\",\n \"items\": {\n \"type\": \"string\",\n \"description\": \"Hex color code\"\n }\n }\n },\n \"required\": [\n \"theme\",\n \"colors\"\n ]\n }\n }\n ],\n \"tool_choice\": \"auto\",\n \"max_output_tokens\": \"inf\",\n \"tracing\": null,\n \"prompt\": null,\n \"expires_at\": 1756324832,\n \"audio\": {\n \"input\": {\n \"format\": {\n \"type\": \"audio/pcm\",\n \"rate\": 24000\n },\n \"transcription\": null,\n \"noise_reduction\": null,\n \"turn_detection\": {\n \"type\": \"server_vad\",\n \"threshold\": 0.5,\n \"prefix_padding_ms\": 300,\n \"silence_duration_ms\": 200,\n \"idle_timeout_ms\": null,\n \"create_response\": true,\n \"interrupt_response\": true\n }\n },\n \"output\": {\n \"format\": {\n \"type\": \"audio/pcm\",\n \"rate\": 24000\n },\n \"voice\": \"marin\",\n \"speed\": 1\n }\n },\n \"include\": null\n },\n}\n" group: realtime name: session.updated - RealtimeServerEventTranscriptionSessionCreated: - required: - - event_id - - type - - session - type: object - properties: - event_id: - type: string - description: The unique ID of the server event. - session: - $ref: '#/components/schemas/RealtimeTranscriptionSessionCreateResponse' - type: - enum: - - transcription_session.created - description: 'The event type, must be `transcription_session.created`.' - x-stainless-const: true - description: "Returned when a transcription session is created.\n" - x-oaiMeta: - example: "{\n \"event_id\": \"event_5566\",\n \"type\": \"transcription_session.created\",\n \"session\": {\n \"id\": \"sess_001\",\n \"object\": \"realtime.transcription_session\",\n \"input_audio_format\": \"pcm16\",\n \"input_audio_transcription\": {\n \"model\": \"gpt-4o-transcribe\",\n \"prompt\": \"\",\n \"language\": \"\"\n },\n \"turn_detection\": {\n \"type\": \"server_vad\",\n \"threshold\": 0.5,\n \"prefix_padding_ms\": 300,\n \"silence_duration_ms\": 500\n },\n \"input_audio_noise_reduction\": {\n \"type\": \"near_field\"\n },\n \"include\": []\n }\n}\n" - group: realtime - name: transcription_session.created RealtimeServerEventTranscriptionSessionUpdated: required: - event_id @@ -24340,7 +24316,7 @@ components: default: auto idle_timeout_ms: type: integer - description: "Optional idle timeout after which turn detection will auto-timeout when\nno additional audio is received.\n" + description: "Optional idle timeout after which turn detection will auto-timeout when\nno additional audio is received and emits a `timeout_triggered` event.\n" nullable: true interrupt_response: type: boolean @@ -24472,6 +24448,141 @@ components: x-stainless-const: true description: Realtime session object configuration. 
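For orientation, a hedged sketch of consuming the retargeted Session2 union from the hunks above (the `session` payload now resolves to the GA response shapes); the constructor, implicit conversion, and Match signature are taken from the generated code shown earlier, and the values are illustrative only:

using System;
using tryAGI.OpenAI;

// Session2 now wraps the GA shapes; the implicit operator builds it from either alternative.
Session2 session = new RealtimeTranscriptionSessionCreateResponseGA(
    id: "sess_1234567890abcdef",
    @object: "realtime.transcription_session",
    audio: null,
    expiresAt: null,
    include: null,
    type: RealtimeTranscriptionSessionCreateResponseGAType.Transcription);

// Match dispatches on whichever alternative is populated.
session.Match(
    realtimeSessionCreateResponseGA: _ => Console.WriteLine("realtime session"),
    realtimeTranscriptionSessionCreateResponseGA: s => Console.WriteLine($"transcription session {s.Id}"));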
RealtimeSessionCreateResponse: + title: Realtime session configuration object + type: object + properties: + audio: + type: object + properties: + input: + type: object + properties: + format: + $ref: '#/components/schemas/RealtimeAudioFormats' + noise_reduction: + type: object + properties: + type: + $ref: '#/components/schemas/NoiseReductionType' + description: "Configuration for input audio noise reduction.\n" + transcription: + $ref: '#/components/schemas/AudioTranscription' + turn_detection: + type: object + properties: + prefix_padding_ms: + type: integer + silence_duration_ms: + type: integer + threshold: + type: number + type: + type: string + description: "Type of turn detection, only `server_vad` is currently supported.\n" + description: "Configuration for turn detection.\n" + output: + type: object + properties: + format: + $ref: '#/components/schemas/RealtimeAudioFormats' + speed: + type: number + voice: + $ref: '#/components/schemas/VoiceIdsShared' + description: "Configuration for input and output audio for the session.\n" + expires_at: + type: integer + description: 'Expiration timestamp for the session, in seconds since epoch.' + id: + type: string + description: "Unique identifier for the session that looks like `sess_1234567890abcdef`.\n" + include: + type: array + items: + enum: + - item.input_audio_transcription.logprobs + type: string + description: "Additional fields to include in server outputs.\n- `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription.\n" + instructions: + type: string + description: "The default system instructions (i.e. system message) prepended to model\ncalls. This field allows the client to guide the model on desired\nresponses. The model can be instructed on response content and format,\n(e.g. \"be extremely succinct\", \"act friendly\", \"here are examples of good\nresponses\") and on audio behavior (e.g. \"talk quickly\", \"inject emotion\ninto your voice\", \"laugh frequently\"). The instructions are not guaranteed\nto be followed by the model, but they provide guidance to the model on the\ndesired behavior.\n\nNote that the server sets default instructions which will be used if this\nfield is not set and are visible in the `session.created` event at the\nstart of the session.\n" + max_output_tokens: + anyOf: + - type: integer + - enum: + - inf + type: string + x-stainless-const: true + description: "Maximum number of output tokens for a single assistant response,\ninclusive of tool calls. Provide an integer between 1 and 4096 to\nlimit output tokens, or `inf` for the maximum available tokens for a\ngiven model. Defaults to `inf`.\n" + model: + type: string + description: The Realtime model used for this session. + object: + type: string + description: The object type. Always `realtime.session`. + output_modalities: + items: + enum: + - text + - audio + type: string + description: "The set of modalities the model can respond with. To disable audio,\nset this to [\"text\"].\n" + tool_choice: + type: string + description: "How the model chooses tools. Options are `auto`, `none`, `required`, or\nspecify a function.\n" + tools: + type: array + items: + $ref: '#/components/schemas/RealtimeFunctionTool' + description: Tools (functions) available to the model. 
+ tracing: + title: Tracing Configuration + anyOf: + - enum: + - auto + type: string + description: "Default tracing mode for the session.\n" + default: auto + x-stainless-const: true + - title: Tracing Configuration + type: object + properties: + group_id: + type: string + description: "The group id to attach to this trace to enable filtering and\ngrouping in the traces dashboard.\n" + metadata: + type: object + description: "The arbitrary metadata to attach to this trace to enable\nfiltering in the traces dashboard.\n" + workflow_name: + type: string + description: "The name of the workflow to attach to this trace. This is used to\nname the trace in the traces dashboard.\n" + description: "Granular configuration for tracing.\n" + description: "Configuration options for tracing. Set to null to disable tracing. Once\ntracing is enabled for a session, the configuration cannot be modified.\n\n`auto` will create a trace for the session with default values for the\nworkflow name, group id, and metadata.\n" + turn_detection: + type: object + properties: + prefix_padding_ms: + type: integer + description: "Amount of audio to include before the VAD detected speech (in\nmilliseconds). Defaults to 300ms.\n" + silence_duration_ms: + type: integer + description: "Duration of silence to detect speech stop (in milliseconds). Defaults\nto 500ms. With shorter values the model will respond more quickly,\nbut may jump in on short pauses from the user.\n" + threshold: + type: number + description: "Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A\nhigher threshold will require louder audio to activate the model, and\nthus might perform better in noisy environments.\n" + type: + type: string + description: "Type of turn detection, only `server_vad` is currently supported.\n" + description: "Configuration for turn detection. Can be set to `null` to turn off. 
Server\nVAD means that the model will detect the start and end of speech based on\naudio volume and respond at the end of user speech.\n" + description: "A Realtime session configuration object.\n" + x-oaiMeta: + example: "{\n \"id\": \"sess_001\",\n \"object\": \"realtime.session\",\n \"expires_at\": 1742188264,\n \"model\": \"gpt-realtime\",\n \"output_modalities\": [\"audio\"],\n \"instructions\": \"You are a friendly assistant.\",\n \"tools\": [],\n \"tool_choice\": \"none\",\n \"max_output_tokens\": \"inf\",\n \"tracing\": \"auto\",\n \"truncation\": \"auto\",\n \"prompt\": null,\n \"audio\": {\n \"input\": {\n \"format\": {\n \"type\": \"audio/pcm\",\n \"rate\": 24000\n },\n \"transcription\": { \"model\": \"whisper-1\" },\n \"noise_reduction\": null,\n \"turn_detection\": null\n },\n \"output\": {\n \"format\": {\n \"type\": \"audio/pcm\",\n \"rate\": 24000\n },\n \"voice\": \"alloy\",\n \"speed\": 1.0\n }\n }\n}\n" + group: realtime + name: The session object + RealtimeSessionCreateResponseGA: + required: + - client_secret + - type type: object properties: audio: @@ -24508,7 +24619,7 @@ components: default: auto idle_timeout_ms: type: integer - description: "Optional idle timeout after which turn detection will auto-timeout when\nno additional audio is received.\n" + description: "Optional idle timeout after which turn detection will auto-timeout when\nno additional audio is received and emits a `timeout_triggered` event.\n" nullable: true interrupt_response: type: boolean @@ -24827,6 +24938,74 @@ components: example: "{\n \"id\": \"sess_BBwZc7cFV3XizEyKGDCGL\",\n \"object\": \"realtime.transcription_session\",\n \"expires_at\": 1742188264,\n \"modalities\": [\"audio\", \"text\"],\n \"turn_detection\": {\n \"type\": \"server_vad\",\n \"threshold\": 0.5,\n \"prefix_padding_ms\": 300,\n \"silence_duration_ms\": 200\n },\n \"input_audio_format\": \"pcm16\",\n \"input_audio_transcription\": {\n \"model\": \"gpt-4o-transcribe\",\n \"language\": null,\n \"prompt\": \"\"\n },\n \"client_secret\": null\n}\n" group: realtime name: The transcription session object + RealtimeTranscriptionSessionCreateResponseGA: + title: Realtime transcription session configuration object + required: + - type + - id + - object + type: object + properties: + audio: + type: object + properties: + input: + type: object + properties: + format: + $ref: '#/components/schemas/RealtimeAudioFormats' + noise_reduction: + type: object + properties: + type: + $ref: '#/components/schemas/NoiseReductionType' + description: "Configuration for input audio noise reduction.\n" + transcription: + $ref: '#/components/schemas/AudioTranscription' + turn_detection: + type: object + properties: + prefix_padding_ms: + type: integer + description: "Amount of audio to include before the VAD detected speech (in\nmilliseconds). Defaults to 300ms.\n" + silence_duration_ms: + type: integer + description: "Duration of silence to detect speech stop (in milliseconds). Defaults\nto 500ms. With shorter values the model will respond more quickly,\nbut may jump in on short pauses from the user.\n" + threshold: + type: number + description: "Activation threshold for VAD (0.0 to 1.0), this defaults to 0.5. A\nhigher threshold will require louder audio to activate the model, and\nthus might perform better in noisy environments.\n" + type: + type: string + description: "Type of turn detection, only `server_vad` is currently supported.\n" + description: "Configuration for turn detection. Can be set to `null` to turn off. 
Server\nVAD means that the model will detect the start and end of speech based on\naudio volume and respond at the end of user speech.\n" + description: "Configuration for input audio for the session.\n" + expires_at: + type: integer + description: 'Expiration timestamp for the session, in seconds since epoch.' + id: + type: string + description: "Unique identifier for the session that looks like `sess_1234567890abcdef`.\n" + include: + type: array + items: + enum: + - item.input_audio_transcription.logprobs + type: string + description: "Additional fields to include in server outputs.\n- `item.input_audio_transcription.logprobs`: Include logprobs for input audio transcription.\n" + object: + type: string + description: The object type. Always `realtime.transcription_session`. + type: + enum: + - transcription + type: string + description: "The type of session. Always `transcription` for transcription sessions.\n" + x-stainless-const: true + description: "A Realtime transcription session configuration object.\n" + x-oaiMeta: + example: "{\n \"id\": \"sess_BBwZc7cFV3XizEyKGDCGL\",\n \"type\": \"transcription\",\n \"object\": \"realtime.transcription_session\",\n \"expires_at\": 1742188264,\n \"include\": [\"item.input_audio_transcription.logprobs\"],\n \"audio\": {\n \"input\": {\n \"format\": \"pcm16\",\n \"transcription\": {\n \"model\": \"gpt-4o-transcribe\",\n \"language\": null,\n \"prompt\": \"\"\n },\n \"noise_reduction\": null,\n \"turn_detection\": {\n \"type\": \"server_vad\",\n \"threshold\": 0.5,\n \"prefix_padding_ms\": 300,\n \"silence_duration_ms\": 200\n }\n }\n }\n}\n" + group: realtime + name: The transcription session object RealtimeTruncation: title: Realtime Truncation Controls anyOf: