diff --git a/DESCRIPTION b/DESCRIPTION
index 9a376ed..7916f05 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: EndpointR
 Title: Connects to various Machine Learning inference providers
-Version: 0.2.2
+Version: 0.2.3
 Authors@R: c(
     person("Jack", "Penzer", , "jack.penzer@samy.com", role = c("aut", "cre")),
     person("Ben", "Jessup", , "ben.jessup@samy.com", role = "aut"),
diff --git a/NEWS.md b/NEWS.md
index c5072c8..d2c7b85 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -1,5 +1,12 @@
 # EndpointR (dev)
 
+# EndpointR 0.2.3
+
+- Bug fix with error message handling: previously we passed raw `error_msg` to cli:: functions, which then interpret it as glue and try to handle '{ }' when they appear in the error messages. The fix is to pass "{error_msg}" already string-interpolated. Fix added to OpenAI integrations as well as the Anthropic Batch implementation
+- Tests added, and request creation for Ant batches now checks against the RegEx Anthropic provides
+
+
+
 # EndpointR 0.2.2
 
 ## Anthropic Messages API
@@ -16,7 +23,7 @@ Functions for dealing with Anthropic Bathches API, works differently ot the Open
 - `ant_batch_list()`
 - `ant_batch_cancel()`
 
-See the [Sync Async Vignette](articles/sync_async.html#anthropic-message-batches-api) for more details
+See the [Sync Async Vignette](https://jpcompartir.github.io/EndpointR/articles/sync_async.html#anthropic-message-batches-api) for more details
 
 # EndpointR 0.2.1
 
diff --git a/R/anthropic_batch.R b/R/anthropic_batch.R
index 24eb1c0..8c16ded 100644
--- a/R/anthropic_batch.R
+++ b/R/anthropic_batch.R
@@ -66,6 +66,17 @@ ant_batch_create <- function(
     "batch cannot exceed 100,000 requests" = length(texts) <= 100000
   )
 
+  bad_ids <- !grepl("^[a-zA-Z0-9_-]{1,64}$", custom_ids)
+  if (any(bad_ids)) {
+    n_bad <- sum(bad_ids)
+    examples <- utils::head(custom_ids[bad_ids], 3)
+    cli::cli_abort(c(
+      "{.arg custom_ids} must be 1-64 characters using only letters, numbers, hyphens, and underscores",
+      "i" = "{n_bad} id{?s} {?does/do} not meet this requirement",
+      "i" = "Example{?s}: {.val {examples}}"
+    ))
+  }
+
   if (!is.null(system_prompt)) {
     if (!rlang::is_scalar_character(system_prompt)) {
       cli::cli_abort("{.arg system_prompt} must be a {.cls character} of length 1")
@@ -131,7 +142,7 @@ ant_batch_create <- function(
     error_msg <- .extract_api_error(response)
     cli::cli_abort(c(
       "Batch creation failed",
-      "x" = error_msg
+      "x" = "{error_msg}"
     ))
   }
 
@@ -181,7 +192,7 @@ ant_batch_status <- function(
     error_msg <- .extract_api_error(response)
     cli::cli_abort(c(
       "Failed to retrieve batch status",
-      "x" = error_msg
+      "x" = "{error_msg}"
     ))
   }
 
@@ -252,7 +263,7 @@ ant_batch_results <- function(
     error_msg <- .extract_api_error(response)
     cli::cli_abort(c(
       "Failed to retrieve batch results",
-      "x" = error_msg
+      "x" = "{error_msg}"
     ))
   }
 
@@ -379,7 +390,7 @@ ant_batch_list <- function(
     error_msg <- .extract_api_error(response)
     cli::cli_abort(c(
       "Failed to list batches",
-      "x" = error_msg
+      "x" = "{error_msg}"
     ))
   }
 
@@ -428,7 +439,7 @@ ant_batch_cancel <- function(
     error_msg <- .extract_api_error(response)
     cli::cli_abort(c(
      "Failed to cancel batch",
-      "x" = error_msg
+      "x" = "{error_msg}"
     ))
   }
 
diff --git a/R/anthropic_messages.R b/R/anthropic_messages.R
index 671a58f..0503b54 100644
--- a/R/anthropic_messages.R
+++ b/R/anthropic_messages.R
@@ -250,7 +250,7 @@ ant_complete_text <- function(text,
     error_msg <- .extract_api_error(response)
     cli::cli_abort(c(
       "API request failed",
-      "x" = error_msg
+      "x" = "{error_msg}"
     ))
   }
 
diff --git a/R/openai_completions.R b/R/openai_completions.R
index 9370cf7..4744f07 100644
--- a/R/openai_completions.R
+++ b/R/openai_completions.R
@@ -278,7 +278,7 @@ oai_complete_text <- function(text,
     error_msg <- .extract_api_error(response)
     cli::cli_abort(c(
       "API request failed",
-      "x" = error_msg
+      "x" = "{error_msg}"
     ))
   }
 
diff --git a/R/openai_files_api.R b/R/openai_files_api.R
index fb03bb8..eb1133a 100644
--- a/R/openai_files_api.R
+++ b/R/openai_files_api.R
@@ -79,7 +79,7 @@ oai_file_upload <- function(file, purpose = c("batch", "fine-tune", "assistants"
     error_msg <- result$error$message %||% "Unknown error"
     cli::cli_abort(c(
       "Failed to upload file to OpenAI Files API",
-      "x" = error_msg
+      "x" = "{error_msg}"
     ))
   }
 
diff --git a/man/EndpointR-package.Rd b/man/EndpointR-package.Rd
index de47fdd..1475ce8 100644
--- a/man/EndpointR-package.Rd
+++ b/man/EndpointR-package.Rd
@@ -16,10 +16,11 @@ Useful links:
 }
 
 \author{
-\strong{Maintainer}: Jack Penzer \email{Jack.penzer@sharecreative.com}
+\strong{Maintainer}: Jack Penzer \email{jack.penzer@samy.com}
 
 Authors:
 \itemize{
+  \item Ben Jessup \email{ben.jessup@samy.com}
   \item Claude AI
 }
 
diff --git a/tests/testthat/test-anthropic_batch.R b/tests/testthat/test-anthropic_batch.R
index 2a2f5f5..7d74c2d 100644
--- a/tests/testthat/test-anthropic_batch.R
+++ b/tests/testthat/test-anthropic_batch.R
@@ -30,6 +30,33 @@ test_that("ant_batch_create validates inputs", {
     ),
     "batch cannot exceed 100,000 requests"
   )
+
+  # too long
+  expect_error(
+    ant_batch_create(
+      texts = c("a", "b"),
+      custom_ids = c("short_id", strrep("x", 65))
+    ),
+    "1-64 characters"
+  )
+
+  # invalid characters (colons)
+  expect_error(
+    ant_batch_create(
+      texts = c("a"),
+      custom_ids = c("BLUESKY_did:plc:abc123")
+    ),
+    "letters, numbers, hyphens, and underscores"
+  )
+
+  # mix of too long and invalid chars
+  expect_error(
+    ant_batch_create(
+      texts = c("a", "b", "c"),
+      custom_ids = c("ok", strrep("a", 100), "has:colons")
+    ),
+    "2 ids do not meet"
+  )
 })
 
 test_that("ant_batch_status validates batch_id", {
@@ -210,6 +237,19 @@ test_that("ant_batch_results parses JSONL correctly into expected tibble", {
   expect_equal(errored$.error_msg, "Invalid request")
 })
 
+test_that("ant_batch_create error messages with curly braces don't crash cli", {
+  withr::local_envvar(ANTHROPIC_API_KEY = "test-key")
+
+  expect_error(
+    ant_batch_create(
+      texts = c("Hello", "World"),
+      custom_ids = c("t1", "t2"),
+      endpoint_url = server$url("/test_ant_batch_create_error_with_braces")
+    ),
+    "Batch creation failed"
+  )
+})
+
 test_that("ant_batch_results errors when batch not ended", {
   withr::local_envvar(ANTHROPIC_API_KEY = "test-key")