Skip to content

Implement generated ListBatches operations #606

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions api/OpenAI.net8.0.cs
Original file line number Diff line number Diff line change
Expand Up @@ -1355,10 +1355,45 @@ public class BatchClient {
public virtual Task<CreateBatchOperation> CreateBatchAsync(BinaryContent content, bool waitUntilCompleted, RequestOptions options = null);
public virtual ClientResult GetBatch(string batchId, RequestOptions options);
public virtual Task<ClientResult> GetBatchAsync(string batchId, RequestOptions options);
public virtual CollectionResult<BatchJob> GetBatches(BatchCollectionOptions options = null, CancellationToken cancellationToken = default);
public virtual CollectionResult GetBatches(string after, int? limit, RequestOptions options);
public virtual AsyncCollectionResult<BatchJob> GetBatchesAsync(BatchCollectionOptions options = null, CancellationToken cancellationToken = default);
public virtual AsyncCollectionResult GetBatchesAsync(string after, int? limit, RequestOptions options);
}
[Experimental("OPENAI001")]
public class BatchCollectionOptions : IJsonModel<BatchCollectionOptions>, IPersistableModel<BatchCollectionOptions> {
public string AfterId { get; set; }
public int? PageSizeLimit { get; set; }
protected virtual BatchCollectionOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options);
protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options);
protected virtual BatchCollectionOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options);
protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options);
}
[Experimental("OPENAI001")]
public class BatchJob : IJsonModel<BatchJob>, IPersistableModel<BatchJob> {
public DateTimeOffset? CancelledAt { get; }
public DateTimeOffset? CancellingAt { get; }
public DateTimeOffset? CompletedAt { get; }
public string CompletionWindow { get; }
public DateTimeOffset CreatedAt { get; }
public string Endpoint { get; }
public string ErrorFileId { get; }
public DateTimeOffset? ExpiredAt { get; }
public DateTimeOffset? ExpiresAt { get; }
public DateTimeOffset? FailedAt { get; }
public DateTimeOffset? FinalizingAt { get; }
public string Id { get; }
public DateTimeOffset? InProgressAt { get; }
public string InputFileId { get; }
public IDictionary<string, string> Metadata { get; }
public string Object { get; }
public string OutputFileId { get; }
protected virtual BatchJob JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options);
protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options);
protected virtual BatchJob PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options);
protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options);
}
[Experimental("OPENAI001")]
public class CreateBatchOperation : OperationResult {
public string BatchId { get; }
public override ContinuationToken? RehydrationToken { get; protected set; }
Expand Down
33 changes: 33 additions & 0 deletions api/OpenAI.netstandard2.0.cs
Original file line number Diff line number Diff line change
Expand Up @@ -1221,9 +1221,42 @@ public class BatchClient {
public virtual Task<CreateBatchOperation> CreateBatchAsync(BinaryContent content, bool waitUntilCompleted, RequestOptions options = null);
public virtual ClientResult GetBatch(string batchId, RequestOptions options);
public virtual Task<ClientResult> GetBatchAsync(string batchId, RequestOptions options);
public virtual CollectionResult<BatchJob> GetBatches(BatchCollectionOptions options = null, CancellationToken cancellationToken = default);
public virtual CollectionResult GetBatches(string after, int? limit, RequestOptions options);
public virtual AsyncCollectionResult<BatchJob> GetBatchesAsync(BatchCollectionOptions options = null, CancellationToken cancellationToken = default);
public virtual AsyncCollectionResult GetBatchesAsync(string after, int? limit, RequestOptions options);
}
public class BatchCollectionOptions : IJsonModel<BatchCollectionOptions>, IPersistableModel<BatchCollectionOptions> {
public string AfterId { get; set; }
public int? PageSizeLimit { get; set; }
protected virtual BatchCollectionOptions JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options);
protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options);
protected virtual BatchCollectionOptions PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options);
protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options);
}
public class BatchJob : IJsonModel<BatchJob>, IPersistableModel<BatchJob> {
public DateTimeOffset? CancelledAt { get; }
public DateTimeOffset? CancellingAt { get; }
public DateTimeOffset? CompletedAt { get; }
public string CompletionWindow { get; }
public DateTimeOffset CreatedAt { get; }
public string Endpoint { get; }
public string ErrorFileId { get; }
public DateTimeOffset? ExpiredAt { get; }
public DateTimeOffset? ExpiresAt { get; }
public DateTimeOffset? FailedAt { get; }
public DateTimeOffset? FinalizingAt { get; }
public string Id { get; }
public DateTimeOffset? InProgressAt { get; }
public string InputFileId { get; }
public IDictionary<string, string> Metadata { get; }
public string Object { get; }
public string OutputFileId { get; }
protected virtual BatchJob JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options);
protected virtual void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options);
protected virtual BatchJob PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options);
protected virtual BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options);
}
public class CreateBatchOperation : OperationResult {
public string BatchId { get; }
public override ContinuationToken? RehydrationToken { get; protected set; }
Expand Down
8 changes: 8 additions & 0 deletions codegen/generator/src/Visitors/PaginationVisitor.cs
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,14 @@ public class PaginationVisitor : ScmLibraryVisitor
{
"GetChatCompletionMessagesAsync",
("ChatCompletionMessageListDatum", "ChatCompletionMessageCollectionOptions", _chatParamsToReplace)
},
{
"GetBatches",
("BatchJob", "BatchCollectionOptions", _chatParamsToReplace)
},
{
"GetBatchesAsync",
("BatchJob", "BatchCollectionOptions", _chatParamsToReplace)
}
};

Expand Down
4 changes: 2 additions & 2 deletions specification/base/typespec/batch/models.tsp
Original file line number Diff line number Diff line change
Expand Up @@ -176,9 +176,9 @@ model BatchRequestOutput {
}

model ListBatchesResponse {
data: Batch[];
@pageItems data: Batch[];
first_id?: string;
last_id?: string;
@continuationToken last_id?: string;
has_more: boolean;
object: "list";
}
3 changes: 2 additions & 1 deletion specification/base/typespec/batch/operations.tsp
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ interface Batches {
@operationId("listBatches")
@tag("Batch")
@summary("List your organization's batches.")
@list
listBatches(
@header accept: "application/json",

Expand All @@ -55,7 +56,7 @@ interface Batches {
* For instance, if you make a list request and receive 100 objects, ending with obj_foo, your
* subsequent call can include after=obj_foo in order to fetch the next page of the list.
*/
@query after?: string,
@continuationToken @query after?: string,

/**
* A limit on the number of objects to be returned. Limit can range between 1 and 100, and the
Expand Down
15 changes: 15 additions & 0 deletions specification/client/models/batch.models.tsp
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import "../../base/typespec/batch/main.tsp";
import "@azure-tools/typespec-client-generator-core";

using Azure.ClientGenerator.Core;

namespace OpenAI;

// Client-only options bag for the paginated list-batches operation: bundles the
// shared `after` cursor and `limit` page-size query parameters into one public
// input model (surfaced in the C# API as BatchCollectionOptions).
@access(Access.public)
@usage(Usage.input)
model BatchCollectionOptions {
  ...CollectionAfterQueryParameter,
  ...CollectionLimitQueryParameter,
}


1 change: 1 addition & 0 deletions specification/main.tsp
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import "./client/threads.client.tsp";
import "./client/vector-stores.client.tsp";

import "./client/models/audio.models.tsp";
import "./client/models/batch.models.tsp";
import "./client/models/chat.models.tsp";
import "./client/models/common.models.tsp";
import "./client/models/responses.models.tsp";
Expand Down
28 changes: 0 additions & 28 deletions src/Custom/Batch/BatchClient.Protocol.cs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,6 @@

namespace OpenAI.Batch;

[CodeGenSuppress("GetBatches", typeof(string), typeof(int?), typeof(RequestOptions))]
[CodeGenSuppress("GetBatchesAsync", typeof(string), typeof(int?), typeof(RequestOptions))]
public partial class BatchClient
{
/// <summary>
Expand Down Expand Up @@ -66,30 +64,4 @@ public virtual CreateBatchOperation CreateBatch(BinaryContent content, bool wait
CreateBatchOperation operation = this.CreateCreateBatchOperation(batchId, status, response);
return operation.WaitUntil(waitUntilCompleted, options);
}

/// <summary>
/// [Protocol Method] List your organization's batches.
/// </summary>
/// <param name="after"> A cursor for use in pagination. `after` is an object ID that defines your place in the list. For instance, if you make a list request and receive 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the list. </param>
/// <param name="limit"> A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. </param>
/// <param name="options"> The request options, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ClientResultException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual AsyncCollectionResult GetBatchesAsync(string after, int? limit, RequestOptions options)
{
return new AsyncBatchCollectionResult(this, Pipeline, options, limit, after);
}

/// <summary>
/// [Protocol Method] List your organization's batches.
/// </summary>
/// <param name="after"> A cursor for use in pagination. `after` is an object ID that defines your place in the list. For instance, if you make a list request and receive 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the list. </param>
/// <param name="limit"> A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. </param>
/// <param name="options"> The request options, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ClientResultException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual CollectionResult GetBatches(string after, int? limit, RequestOptions options)
{
return new BatchCollectionResult(this, Pipeline, options, limit, after);
}
}
2 changes: 0 additions & 2 deletions src/Custom/Batch/BatchClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,6 @@ namespace OpenAI.Batch;
[CodeGenSuppress("CancelBatchAsync", typeof(string), typeof(CancellationToken))]
[CodeGenSuppress("CancelBatch", typeof(string), typeof(RequestOptions))]
[CodeGenSuppress("CancelBatchAsync", typeof(string), typeof(RequestOptions))]
[CodeGenSuppress("GetBatches", typeof(string), typeof(int?), typeof(CancellationToken))]
[CodeGenSuppress("GetBatchesAsync", typeof(string), typeof(int?), typeof(CancellationToken))]
public partial class BatchClient
{
// CUSTOM: Added as a convenience.
Expand Down
4 changes: 4 additions & 0 deletions src/Custom/Batch/ChatCompletionCollectionOptions.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
namespace OpenAI.Batch;

// CUSTOM: Use the correct namespace.
// NOTE(review): this file is named ChatCompletionCollectionOptions.cs but declares
// BatchCollectionOptions — it should likely be renamed BatchCollectionOptions.cs.
[CodeGenType("BatchCollectionOptions")] public partial class BatchCollectionOptions { }
4 changes: 4 additions & 0 deletions src/Custom/Batch/GeneratorStubs.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
namespace OpenAI.Batch;

// CUSTOM: Maps the generated "Batch" model to the public BatchJob type
// (this PR also removes the previous internal mapping, InternalBatchJob,
// from Internal/GeneratorStubs.cs).
[CodeGenType("Batch")]
public partial class BatchJob { }
3 changes: 0 additions & 3 deletions src/Custom/Batch/Internal/GeneratorStubs.cs
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,6 @@ internal partial class InternalBatchErrors { }
[CodeGenType("BatchErrorsObject")]
internal readonly partial struct InternalBatchErrorsObject { }

[CodeGenType("Batch")]
internal partial class InternalBatchJob { }

[CodeGenType("BatchObject")]
internal readonly partial struct InternalBatchObject { }

Expand Down
21 changes: 21 additions & 0 deletions src/Generated/BatchClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
using System.ClientModel;
using System.ClientModel.Primitives;
using System.Diagnostics.CodeAnalysis;
using System.Threading;
using System.Threading.Tasks;
using OpenAI;

Expand All @@ -22,6 +23,26 @@ protected BatchClient()

public ClientPipeline Pipeline { get; }

public virtual CollectionResult GetBatches(string after, int? limit, RequestOptions options)
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shouldn't we be generating the doc comments?

{
return new BatchClientGetBatchesCollectionResult(this, after, limit, options);
}

        /// <summary>
        /// [Protocol Method] List your organization's batches.
        /// </summary>
        /// <param name="after"> A cursor for use in pagination. `after` is an object ID that defines your place in the list. </param>
        /// <param name="limit"> A limit on the number of objects to be returned. </param>
        /// <param name="options"> The request options, which can override default behaviors of the client pipeline on a per-call basis. </param>
        /// <returns> An async collection of raw service pages. </returns>
        public virtual AsyncCollectionResult GetBatchesAsync(string after, int? limit, RequestOptions options)
        {
            return new BatchClientGetBatchesAsyncCollectionResult(this, after, limit, options);
        }

        /// <summary>
        /// List your organization's batches as strongly typed <see cref="BatchJob"/> values.
        /// </summary>
        /// <param name="options"> Collection options carrying the pagination cursor (<c>AfterId</c>) and page size (<c>PageSizeLimit</c>); may be null. </param>
        /// <param name="cancellationToken"> The token to cancel the operation; only wrapped into request options when it can actually be canceled. </param>
        /// <returns> A pageable collection of <see cref="BatchJob"/> instances. </returns>
        public virtual CollectionResult<BatchJob> GetBatches(BatchCollectionOptions options = default, CancellationToken cancellationToken = default)
        {
            return new BatchClientGetBatchesCollectionResultOfT(this, options?.AfterId, options?.PageSizeLimit, cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null);
        }

        /// <summary>
        /// List your organization's batches as strongly typed <see cref="BatchJob"/> values, asynchronously.
        /// </summary>
        /// <param name="options"> Collection options carrying the pagination cursor (<c>AfterId</c>) and page size (<c>PageSizeLimit</c>); may be null. </param>
        /// <param name="cancellationToken"> The token to cancel the operation; only wrapped into request options when it can actually be canceled. </param>
        /// <returns> An async pageable collection of <see cref="BatchJob"/> instances. </returns>
        public virtual AsyncCollectionResult<BatchJob> GetBatchesAsync(BatchCollectionOptions options = default, CancellationToken cancellationToken = default)
        {
            return new BatchClientGetBatchesAsyncCollectionResultOfT(this, options?.AfterId, options?.PageSizeLimit, cancellationToken.CanBeCanceled ? new RequestOptions { CancellationToken = cancellationToken } : null);
        }

public virtual ClientResult GetBatch(string batchId, RequestOptions options)
{
Argument.AssertNotNullOrEmpty(batchId, nameof(batchId));
Expand Down
61 changes: 61 additions & 0 deletions src/Generated/BatchClientGetBatchesAsyncCollectionResult.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
// <auto-generated/>

#nullable disable

using System;
using System.ClientModel;
using System.ClientModel.Primitives;
using System.Collections.Generic;

namespace OpenAI.Batch
{
internal partial class BatchClientGetBatchesAsyncCollectionResult : AsyncCollectionResult
{
private readonly BatchClient _client;
private readonly string _after;
private readonly int? _limit;
private readonly RequestOptions _options;

public BatchClientGetBatchesAsyncCollectionResult(BatchClient client, string after, int? limit, RequestOptions options)
{
_client = client;
_after = after;
_limit = limit;
_options = options;
}

public override async IAsyncEnumerable<ClientResult> GetRawPagesAsync()
{
PipelineMessage message = _client.CreateGetBatchesRequest(_after, _limit, _options);
string nextToken = null;
while (true)
{
ClientResult result = ClientResult.FromResponse(await _client.Pipeline.ProcessMessageAsync(message, _options).ConfigureAwait(false));
yield return result;

// Plugin customization: add hasMore assignment
bool hasMore = ((InternalListBatchesResponse)result).HasMore;
nextToken = ((InternalListBatchesResponse)result).LastId;
// Plugin customization: add hasMore == false check to pagination condition
if (nextToken == null || !hasMore)
{
yield break;
}
message = _client.CreateGetBatchesRequest(nextToken, _limit, _options);
}
}

public override ContinuationToken GetContinuationToken(ClientResult page)
{
string nextPage = ((InternalListBatchesResponse)page).LastId;
if (nextPage != null)
{
return ContinuationToken.FromBytes(BinaryData.FromString(nextPage));
}
else
{
return null;
}
}
}
}
71 changes: 71 additions & 0 deletions src/Generated/BatchClientGetBatchesAsyncCollectionResultOfT.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
// <auto-generated/>

#nullable disable

using System;
using System.ClientModel;
using System.ClientModel.Primitives;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace OpenAI.Batch
{
internal partial class BatchClientGetBatchesAsyncCollectionResultOfT : AsyncCollectionResult<BatchJob>
{
private readonly BatchClient _client;
private readonly string _after;
private readonly int? _limit;
private readonly RequestOptions _options;

public BatchClientGetBatchesAsyncCollectionResultOfT(BatchClient client, string after, int? limit, RequestOptions options)
{
_client = client;
_after = after;
_limit = limit;
_options = options;
}

public override async IAsyncEnumerable<ClientResult> GetRawPagesAsync()
{
PipelineMessage message = _client.CreateGetBatchesRequest(_after, _limit, _options);
string nextToken = null;
while (true)
{
ClientResult result = ClientResult.FromResponse(await _client.Pipeline.ProcessMessageAsync(message, _options).ConfigureAwait(false));
yield return result;

// Plugin customization: add hasMore assignment
bool hasMore = ((InternalListBatchesResponse)result).HasMore;
nextToken = ((InternalListBatchesResponse)result).LastId;
// Plugin customization: add hasMore == false check to pagination condition
if (nextToken == null || !hasMore)
{
yield break;
}
message = _client.CreateGetBatchesRequest(nextToken, _limit, _options);
}
}

public override ContinuationToken GetContinuationToken(ClientResult page)
{
string nextPage = ((InternalListBatchesResponse)page).LastId;
if (nextPage != null)
{
return ContinuationToken.FromBytes(BinaryData.FromString(nextPage));
}
else
{
return null;
}
}

protected override async IAsyncEnumerable<BatchJob> GetValuesFromPageAsync(ClientResult page)
{
foreach (BatchJob item in ((InternalListBatchesResponse)page).Data)
{
yield return item;
await Task.Yield();
}
}
}
}
Loading