From bb3f27a4d7ad3f5a25fb6ba5aa9ddff8fd09166d Mon Sep 17 00:00:00 2001 From: Alan Date: Fri, 19 Sep 2025 19:32:51 +0200 Subject: [PATCH 1/6] add functionality to handle Infinity and NaN values --- src/UMBridge.jl | 98 ++++++++++++++++++++++++------------------------ src/inf_nan.jl | 20 ++++++++++ test/runtests.jl | 51 ++++++++++++++++++++----- 3 files changed, 112 insertions(+), 57 deletions(-) create mode 100644 src/inf_nan.jl diff --git a/src/UMBridge.jl b/src/UMBridge.jl index 5f7dbe8..217a1a5 100644 --- a/src/UMBridge.jl +++ b/src/UMBridge.jl @@ -5,6 +5,8 @@ import JSON import Base.Threads using Parameters using Sockets +include("inf_nan.jl") + # Make HTTP request following UM-Bridge protocol struct HTTPModel @@ -34,7 +36,7 @@ function evaluate(model, input, config = Dict()) "config" => config ) - response = HTTP.request("POST", url(model) * "/Evaluate", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/Evaluate", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -52,7 +54,7 @@ function gradient(model::HTTPModel, out_wrt, in_wrt, input, sens, config = Dict( "config" => config ) - response = HTTP.request("POST", url(model) * "/Gradient", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/Gradient", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -69,7 +71,7 @@ function apply_jacobian(model::HTTPModel, out_wrt, in_wrt, input, vec, config = "config" => config ) - response = HTTP.request("POST", url(model) * "/ApplyJacobian", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/ApplyJacobian", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -88,7 +90,7 @@ function apply_hessian(model::HTTPModel, out_wrt, in_wrt1, in_wrt2, input, vec, "config" => config ) - response = 
HTTP.request("POST", url(model) * "/ApplyHessian", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/ApplyHessian", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -116,7 +118,7 @@ function model_input_sizes(model::HTTPModel, config = Dict()) "name" => name(model), "config" => config ) - response = HTTP.request("POST", url(model) * "/InputSizes", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/InputSizes", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -128,7 +130,7 @@ function model_output_sizes(model::HTTPModel, config = Dict()) "name" => name(model), "config" => config ) - response = HTTP.request("POST", url(model) * "/OutputSizes", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/OutputSizes", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -139,7 +141,7 @@ function supports_evaluate(model::HTTPModel) body = Dict( "name" => name(model) ) - response = HTTP.request("POST", url(model) * "/ModelInfo", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -150,7 +152,7 @@ function supports_gradient(model::HTTPModel) body = Dict( "name" => name(model) ) - response = HTTP.request("POST", url(model) * "/ModelInfo", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -161,7 +163,7 @@ function supports_apply_jacobian(model::HTTPModel) body = Dict( "name" => name(model) ) - response = HTTP.request("POST", url(model) * "/ModelInfo", body=JSON.json(body)) + response = 
HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -172,7 +174,7 @@ function supports_apply_hessian(model::HTTPModel) body = Dict( "name" => name(model) ) - response = HTTP.request("POST", url(model) * "/ModelInfo", body=JSON.json(body)) + response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -241,7 +243,7 @@ function runtime_error(model::Model, e, str1, str2, str3) "message" => "Model was unable to provide a valid " * str3 * " due to: " * string(e) * result ) ) - return HTTP.Response(500, JSON.json(body)) + return HTTP.Response(500, jsonify(body)) end function get_model_from_name(models::Vector, model_name::String) @@ -269,7 +271,7 @@ function inputRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end # Extract config @@ -283,7 +285,7 @@ function inputRequest(models::Vector) body = Dict( "inputSizes" => model.inputSizes ) - return HTTP.Response(JSON.json(body)) + return HTTP.Response(jsonify(body)) catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") end @@ -308,7 +310,7 @@ function outputRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end # Extract config @@ -322,7 +324,7 @@ function outputRequest(models::Vector) body = Dict( "outputSizes" => model.outputSizes ) - return HTTP.Response(JSON.json(body)) + return HTTP.Response(jsonify(body)) catch e return runtime_error(model, e, "the evaluation of outputSizes", "OutputSizes", "output size") end @@ -337,7 +339,7 @@ function infoRequest(models::Vector) "protocolVersion" => 1.0, "models" => [model.name for model in 
models] ) - return HTTP.Response(JSON.json(body)) + return HTTP.Response(jsonify(body)) end return handler end @@ -353,7 +355,7 @@ function modelinfoRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end body = Dict( "support" => Dict( @@ -362,7 +364,7 @@ function modelinfoRequest(models::Vector) "ApplyJacobian" => supportsJacobian(model), "ApplyHessian" => supportsHessian(model) )) - return HTTP.Response(JSON.json(body)) + return HTTP.Response(jsonify(body)) end return handler end @@ -388,7 +390,7 @@ function evaluateRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end # Extract inputs and check @@ -401,7 +403,7 @@ function evaluateRequest(models::Vector) "message" => "Invalid input" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -417,7 +419,7 @@ function evaluateRequest(models::Vector) "message" => "Invalid input" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -431,7 +433,7 @@ function evaluateRequest(models::Vector) "message" => "Unsupported feature" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end for i in eachindex(model_parameters) @@ -442,7 +444,7 @@ function evaluateRequest(models::Vector) "message" => "Input must be an array of arrays!" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end if length(model_parameters[i]) != model.inputSizes[i] body = Dict( @@ -451,7 +453,7 @@ function evaluateRequest(models::Vector) "message" => "Input parameter $i has invalid length! 
Expected $(model.inputSizes[i]) but got $(length(model_parameters[i])) instead!" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end end @@ -479,7 +481,7 @@ function evaluateRequest(models::Vector) "message" => "Invalid output" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end catch e return runtime_error(model, e, "the evaluation of outputSizes", "OutputSizes", "output size") @@ -493,7 +495,7 @@ function evaluateRequest(models::Vector) "message" => "Output must be an array of arrays!" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end if length(output[i]) != model.outputSizes[i] body = Dict( @@ -502,13 +504,13 @@ function evaluateRequest(models::Vector) "message" => "Output parameter $i has invalid length! Expected $(model.outputSizes[i]) but got $(length(output[i])) instead!" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end end body = Dict( "output" => output ) - return HTTP.Response(JSON.json(body)) + return HTTP.Response(jsonify(body)) end return handler end @@ -532,7 +534,7 @@ function gradientRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end if !supportsGradient(model) body = Dict( @@ -541,7 +543,7 @@ function gradientRequest(models::Vector) "message" => "Unsupported feature" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end model_inWrt = parsed_body["inWrt"] + 1 # account for julia indices starting at 1 @@ -553,7 +555,7 @@ function gradientRequest(models::Vector) "message" => "Invalid inWrt index! 
Expected between 0 and and number of inputs minus one, but got " * string(model_inWrt - 1) ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -567,7 +569,7 @@ function gradientRequest(models::Vector) "message" => "Invalid outWrt index! Expected between 0 and and number of inputs minus one, but got " * string(model_outWrt - 1) ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -583,7 +585,7 @@ function gradientRequest(models::Vector) "message" => "Invalid number of input parameters" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -598,7 +600,7 @@ function gradientRequest(models::Vector) "message" => "Input parameter $i must be an array!" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end if length(model_parameters[i]) != model.inputSizes[i] @@ -608,7 +610,7 @@ function gradientRequest(models::Vector) "message" => "Input parameter $i has invalid length! Expected $(model.inputSizes[i]) but got $(length(model_parameters[i])) instead!" 
) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -627,7 +629,7 @@ function gradientRequest(models::Vector) body = Dict( "output" => output ) - return HTTP.Response(JSON.json(body)) + return HTTP.Response(jsonify(body)) end return handler @@ -652,7 +654,7 @@ function applyJacobianRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end if !supportsJacobian(model) body = Dict( @@ -661,7 +663,7 @@ function applyJacobianRequest(models::Vector) "message" => "Unsupported feature" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end @@ -677,7 +679,7 @@ function applyJacobianRequest(models::Vector) "message" => "Invalid input" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end for i in eachindex(model_parameters) @@ -688,7 +690,7 @@ function applyJacobianRequest(models::Vector) "message" => "Input must be an array of arrays!" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end if length(model_parameters[i]) != model.inputSizes[i] body = Dict( @@ -697,7 +699,7 @@ function applyJacobianRequest(models::Vector) "message" => "Input parameter $i has invalid length! Expected $(model.inputSizes[i]) but got $(length(model_parameters[i])) instead!" 
) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end end @@ -715,7 +717,7 @@ function applyJacobianRequest(models::Vector) end body = Dict("output" => output) - return HTTP.Response(200, JSON.json(body)) + return HTTP.Response(200, jsonify(body)) end return handler end @@ -739,7 +741,7 @@ function applyHessianRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end if !supportsHessian(model) body = Dict( @@ -748,7 +750,7 @@ function applyHessianRequest(models::Vector) "message" => "Unsupported feature" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end model_inWrt1 = parsed_body["inWrt1"] @@ -765,7 +767,7 @@ function applyHessianRequest(models::Vector) "message" => "Invalid input" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end for i in eachindex(model_parameters) @@ -776,7 +778,7 @@ function applyHessianRequest(models::Vector) "message" => "Input must be an array of arrays!" ) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end if length(model_parameters[i]) != model.inputSizes[i] body = Dict( @@ -785,7 +787,7 @@ function applyHessianRequest(models::Vector) "message" => "Input parameter $i has invalid length! Expected $(model.inputSizes[i]) but got $(length(model_parameters[i])) instead!" 
) ) - return HTTP.Response(400, JSON.json(body)) + return HTTP.Response(400, jsonify(body)) end end @@ -803,7 +805,7 @@ function applyHessianRequest(models::Vector) body = Dict( "output" => output ) - return HTTP.Response(JSON.json(body)) + return HTTP.Response(jsonify(body)) end return handler end diff --git a/src/inf_nan.jl b/src/inf_nan.jl new file mode 100644 index 0000000..2816b59 --- /dev/null +++ b/src/inf_nan.jl @@ -0,0 +1,20 @@ + +import JSON: show_json +import JSON.Serializations: CommonSerialization, StandardSerialization +import JSON: StructuralContext + +struct NaNSerialization <: CommonSerialization end + +function show_json(io::StructuralContext, ::NaNSerialization, f::AbstractFloat) + if f==Inf + Base.print(io, "Infinity") + elseif f == -Inf + Base.print(io, "-Infinity") + elseif f == NaN + Base.print(io, "NaN") + else + Base.print(io, f) + end +end + +jsonify(object::Any; allow_infnan=true) = sprint(show_json, allow_infnan ? NaNSerialization() : StandardSerialization(), object) diff --git a/test/runtests.jl b/test/runtests.jl index 914d7e9..8f40400 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -17,8 +17,8 @@ function testserver_sizes(models) "name" => UMBridge.name(models[1]), "config" => Dict() ) - response_input = UMBridge.inputRequest(models)(HTTP.Request("POST", "/InputSizes", [], JSON.json(body))) - response_output = UMBridge.outputRequest(models)(HTTP.Request("POST", "/OutputSizes", [], JSON.json(body))) + response_input = UMBridge.inputRequest(models)(HTTP.Request("POST", "/InputSizes", [], UMBridge.jsonify(body))) + response_output = UMBridge.outputRequest(models)(HTTP.Request("POST", "/OutputSizes", [], UMBridge.jsonify(body))) all([response_input.status == 200, response_output.status == 200]) end @@ -27,7 +27,7 @@ function testserver_info(models) "name" => UMBridge.name(models[1]), "config" => Dict() ) - response_input = UMBridge.infoRequest(models)(HTTP.Request("GET", "/Info", [], JSON.json(body))) + response_input = 
UMBridge.infoRequest(models)(HTTP.Request("GET", "/Info", [], UMBridge.jsonify(body))) return response_input.status == 200 end @@ -38,7 +38,7 @@ function testserver_evaluate(models) "input" => [[1.0]], "config" => Dict() ) - response_input = UMBridge.evaluateRequest(models)(HTTP.Request("POST", "/Evaluate", [], JSON.json(body))) + response_input = UMBridge.evaluateRequest(models)(HTTP.Request("POST", "/Evaluate", [], UMBridge.jsonify(body))) return response_input.status == 200 end @@ -53,7 +53,7 @@ function testserver_gradient(models) "input" => [[1.0]], "config" => Dict() ) - response_input = UMBridge.gradientRequest(models)(HTTP.Request("POST", "/Gradient", [], JSON.json(body))) + response_input = UMBridge.gradientRequest(models)(HTTP.Request("POST", "/Gradient", [], UMBridge.jsonify(body))) return response_input.status == 200 end @@ -68,7 +68,7 @@ function testserver_jacobian(models) "vec" => [1.0], "config" => Dict() ) - response_input = UMBridge.applyJacobianRequest(models)(HTTP.Request("POST", "/ApplyJacobian", [], JSON.json(body))) + response_input = UMBridge.applyJacobianRequest(models)(HTTP.Request("POST", "/ApplyJacobian", [], UMBridge.jsonify(body))) return response_input.status == 200 end @@ -85,7 +85,7 @@ function testserver_hessian(models) "sens" => [1], "config" => Dict() ) - response_input = UMBridge.applyHessianRequest(models)(HTTP.Request("POST", "/ApplyHessian", [], JSON.json(body))) + response_input = UMBridge.applyHessianRequest(models)(HTTP.Request("POST", "/ApplyHessian", [], UMBridge.jsonify(body))) return response_input.status == 200 end @@ -164,7 +164,7 @@ function testserver_gradient_1D(models) ) # Make gradient request - response_input = UMBridge.gradientRequest(models)(HTTP.Request("POST", "/Gradient", [], JSON.json(body))) + response_input = UMBridge.gradientRequest(models)(HTTP.Request("POST", "/Gradient", [], UMBridge.jsonify(body))) expected_gradient = models[1].gradient(1, 1, input, sens, Dict()) # Verify the gradient application 
result @@ -200,7 +200,7 @@ function testserver_jacobian_2D(models) ) # Make jacobian request - response_input = UMBridge.applyJacobianRequest(models)(HTTP.Request("POST", "/ApplyJacobian", [], JSON.json(body))) + response_input = UMBridge.applyJacobianRequest(models)(HTTP.Request("POST", "/ApplyJacobian", [], UMBridge.jsonify(body))) expected_jacobian_application = models[1].applyJacobian(1, 1, input, vect, Dict()) @@ -212,3 +212,36 @@ end @testset "UMBridge 2D Apply Jacobian Test" begin @test testserver_jacobian_2D([model_2D]) end + + +# Define model for 1D function f(x) = x^2 +model_infnan = UMBridge.Model( + name = "inf_nan", + inputSizes = [4], + outputSizes = [4], + evaluate = (input, config) -> [input[1]] +) + + +function testserver_model_infnan(models) + input = [[0.0, Inf, -Inf, NaN]] # Example input + + body = Dict( + "name" => UMBridge.name(models[1]), + "input" => input, + "config" => Dict() + ) + + # Make evaluate request + response_input = UMBridge.evaluateRequest(models)(HTTP.Request("POST", "/Evaluate", [], UMBridge.jsonify(body))) + expected_output = models[1].evaluate(input, Dict()) + + # Verify if Infinity and NaN values are handled correctly + return response_input.status == 200 && + isequal(String(response_input.body), "{\"output\":[[0.0,Infinity,-Infinity,NaN]]}") && + isequal(convert(Vector{Vector{Float64}}, JSON.parse(String(response_input.body))["output"]), expected_output) +end + +@testset "UMBridge Inf NaN Test" begin + @test testserver_model_infnan([model_infnan]) +end From b0c6ca7dab49c194545bb75919d884ca0a927de4 Mon Sep 17 00:00:00 2001 From: Alan Date: Fri, 19 Sep 2025 19:33:32 +0200 Subject: [PATCH 2/6] bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index e7b98f0..9b1bc78 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "UMBridge" uuid = "0ac74fe0-b753-4e62-be71-04a8383fbbef" authors = ["UM-Bridge Team"] -version = "1.1.7" +version = 
"1.2.0" [deps] HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3" From 1b5ef6154984ab94c4f00063b06fdf5f4f3abfc8 Mon Sep 17 00:00:00 2001 From: Alan Date: Fri, 19 Sep 2025 19:43:15 +0200 Subject: [PATCH 3/6] explicitly provide allow_infnan=true --- src/UMBridge.jl | 96 ++++++++++++++++++++++++------------------------- src/inf_nan.jl | 2 +- 2 files changed, 49 insertions(+), 49 deletions(-) diff --git a/src/UMBridge.jl b/src/UMBridge.jl index 217a1a5..b385ea5 100644 --- a/src/UMBridge.jl +++ b/src/UMBridge.jl @@ -36,7 +36,7 @@ function evaluate(model, input, config = Dict()) "config" => config ) - response = HTTP.request("POST", url(model) * "/Evaluate", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/Evaluate", body=jsonify(body; allow_infnan=true)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -54,7 +54,7 @@ function gradient(model::HTTPModel, out_wrt, in_wrt, input, sens, config = Dict( "config" => config ) - response = HTTP.request("POST", url(model) * "/Gradient", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/Gradient", body=jsonify(body; allow_infnan=true)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -71,7 +71,7 @@ function apply_jacobian(model::HTTPModel, out_wrt, in_wrt, input, vec, config = "config" => config ) - response = HTTP.request("POST", url(model) * "/ApplyJacobian", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/ApplyJacobian", body=jsonify(body; allow_infnan=true)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -90,7 +90,7 @@ function apply_hessian(model::HTTPModel, out_wrt, in_wrt1, in_wrt2, input, vec, "config" => config ) - response = HTTP.request("POST", url(model) * "/ApplyHessian", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/ApplyHessian", body=jsonify(body; allow_infnan=true)) 
check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -118,7 +118,7 @@ function model_input_sizes(model::HTTPModel, config = Dict()) "name" => name(model), "config" => config ) - response = HTTP.request("POST", url(model) * "/InputSizes", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/InputSizes", body=jsonify(body; allow_infnan=true)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -130,7 +130,7 @@ function model_output_sizes(model::HTTPModel, config = Dict()) "name" => name(model), "config" => config ) - response = HTTP.request("POST", url(model) * "/OutputSizes", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/OutputSizes", body=jsonify(body; allow_infnan=true)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -141,7 +141,7 @@ function supports_evaluate(model::HTTPModel) body = Dict( "name" => name(model) ) - response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body; allow_infnan=true)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -152,7 +152,7 @@ function supports_gradient(model::HTTPModel) body = Dict( "name" => name(model) ) - response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body; allow_infnan=true)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -163,7 +163,7 @@ function supports_apply_jacobian(model::HTTPModel) body = Dict( "name" => name(model) ) - response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body; allow_infnan=true)) 
check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -174,7 +174,7 @@ function supports_apply_hessian(model::HTTPModel) body = Dict( "name" => name(model) ) - response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body)) + response = HTTP.request("POST", url(model) * "/ModelInfo", body=jsonify(body; allow_infnan=true)) check_response(response, 200) parsed = JSON.parse(String(response.body)) check_parsed_response(parsed) @@ -243,7 +243,7 @@ function runtime_error(model::Model, e, str1, str2, str3) "message" => "Model was unable to provide a valid " * str3 * " due to: " * string(e) * result ) ) - return HTTP.Response(500, jsonify(body)) + return HTTP.Response(500, jsonify(body; allow_infnan=true)) end function get_model_from_name(models::Vector, model_name::String) @@ -271,7 +271,7 @@ function inputRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end # Extract config @@ -285,7 +285,7 @@ function inputRequest(models::Vector) body = Dict( "inputSizes" => model.inputSizes ) - return HTTP.Response(jsonify(body)) + return HTTP.Response(jsonify(body; allow_infnan=true)) catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") end @@ -310,7 +310,7 @@ function outputRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end # Extract config @@ -324,7 +324,7 @@ function outputRequest(models::Vector) body = Dict( "outputSizes" => model.outputSizes ) - return HTTP.Response(jsonify(body)) + return HTTP.Response(jsonify(body; allow_infnan=true)) catch e return runtime_error(model, e, "the evaluation of outputSizes", "OutputSizes", "output size") end @@ -339,7 +339,7 @@ function infoRequest(models::Vector) "protocolVersion" => 1.0, "models" 
=> [model.name for model in models] ) - return HTTP.Response(jsonify(body)) + return HTTP.Response(jsonify(body; allow_infnan=true)) end return handler end @@ -355,7 +355,7 @@ function modelinfoRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end body = Dict( "support" => Dict( @@ -364,7 +364,7 @@ function modelinfoRequest(models::Vector) "ApplyJacobian" => supportsJacobian(model), "ApplyHessian" => supportsHessian(model) )) - return HTTP.Response(jsonify(body)) + return HTTP.Response(jsonify(body; allow_infnan=true)) end return handler end @@ -390,7 +390,7 @@ function evaluateRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end # Extract inputs and check @@ -403,7 +403,7 @@ function evaluateRequest(models::Vector) "message" => "Invalid input" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -419,7 +419,7 @@ function evaluateRequest(models::Vector) "message" => "Invalid input" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -433,7 +433,7 @@ function evaluateRequest(models::Vector) "message" => "Unsupported feature" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end for i in eachindex(model_parameters) @@ -444,7 +444,7 @@ function evaluateRequest(models::Vector) "message" => "Input must be an array of arrays!" 
) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end if length(model_parameters[i]) != model.inputSizes[i] body = Dict( @@ -453,7 +453,7 @@ function evaluateRequest(models::Vector) "message" => "Input parameter $i has invalid length! Expected $(model.inputSizes[i]) but got $(length(model_parameters[i])) instead!" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end end @@ -481,7 +481,7 @@ function evaluateRequest(models::Vector) "message" => "Invalid output" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end catch e return runtime_error(model, e, "the evaluation of outputSizes", "OutputSizes", "output size") @@ -495,7 +495,7 @@ function evaluateRequest(models::Vector) "message" => "Output must be an array of arrays!" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end if length(output[i]) != model.outputSizes[i] body = Dict( @@ -504,13 +504,13 @@ function evaluateRequest(models::Vector) "message" => "Output parameter $i has invalid length! Expected $(model.outputSizes[i]) but got $(length(output[i])) instead!" 
) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end end body = Dict( "output" => output ) - return HTTP.Response(jsonify(body)) + return HTTP.Response(jsonify(body; allow_infnan=true)) end return handler end @@ -534,7 +534,7 @@ function gradientRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end if !supportsGradient(model) body = Dict( @@ -543,7 +543,7 @@ function gradientRequest(models::Vector) "message" => "Unsupported feature" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end model_inWrt = parsed_body["inWrt"] + 1 # account for julia indices starting at 1 @@ -555,7 +555,7 @@ function gradientRequest(models::Vector) "message" => "Invalid inWrt index! Expected between 0 and and number of inputs minus one, but got " * string(model_inWrt - 1) ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -569,7 +569,7 @@ function gradientRequest(models::Vector) "message" => "Invalid outWrt index! 
Expected between 0 and and number of inputs minus one, but got " * string(model_outWrt - 1) ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -585,7 +585,7 @@ function gradientRequest(models::Vector) "message" => "Invalid number of input parameters" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -600,7 +600,7 @@ function gradientRequest(models::Vector) "message" => "Input parameter $i must be an array!" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end if length(model_parameters[i]) != model.inputSizes[i] @@ -610,7 +610,7 @@ function gradientRequest(models::Vector) "message" => "Input parameter $i has invalid length! Expected $(model.inputSizes[i]) but got $(length(model_parameters[i])) instead!" 
) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end catch e return runtime_error(model, e, "the evaluation of inputSizes", "InputSizes", "input size") @@ -629,7 +629,7 @@ function gradientRequest(models::Vector) body = Dict( "output" => output ) - return HTTP.Response(jsonify(body)) + return HTTP.Response(jsonify(body; allow_infnan=true)) end return handler @@ -654,7 +654,7 @@ function applyJacobianRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end if !supportsJacobian(model) body = Dict( @@ -663,7 +663,7 @@ function applyJacobianRequest(models::Vector) "message" => "Unsupported feature" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end @@ -679,7 +679,7 @@ function applyJacobianRequest(models::Vector) "message" => "Invalid input" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end for i in eachindex(model_parameters) @@ -690,7 +690,7 @@ function applyJacobianRequest(models::Vector) "message" => "Input must be an array of arrays!" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end if length(model_parameters[i]) != model.inputSizes[i] body = Dict( @@ -699,7 +699,7 @@ function applyJacobianRequest(models::Vector) "message" => "Input parameter $i has invalid length! Expected $(model.inputSizes[i]) but got $(length(model_parameters[i])) instead!" 
) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end end @@ -717,7 +717,7 @@ function applyJacobianRequest(models::Vector) end body = Dict("output" => output) - return HTTP.Response(200, jsonify(body)) + return HTTP.Response(200, jsonify(body; allow_infnan=true)) end return handler end @@ -741,7 +741,7 @@ function applyHessianRequest(models::Vector) "message" => "Model name not found" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end if !supportsHessian(model) body = Dict( @@ -750,7 +750,7 @@ function applyHessianRequest(models::Vector) "message" => "Unsupported feature" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end model_inWrt1 = parsed_body["inWrt1"] @@ -767,7 +767,7 @@ function applyHessianRequest(models::Vector) "message" => "Invalid input" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end for i in eachindex(model_parameters) @@ -778,7 +778,7 @@ function applyHessianRequest(models::Vector) "message" => "Input must be an array of arrays!" ) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end if length(model_parameters[i]) != model.inputSizes[i] body = Dict( @@ -787,7 +787,7 @@ function applyHessianRequest(models::Vector) "message" => "Input parameter $i has invalid length! Expected $(model.inputSizes[i]) but got $(length(model_parameters[i])) instead!" 
) ) - return HTTP.Response(400, jsonify(body)) + return HTTP.Response(400, jsonify(body; allow_infnan=true)) end end @@ -805,7 +805,7 @@ function applyHessianRequest(models::Vector) body = Dict( "output" => output ) - return HTTP.Response(jsonify(body)) + return HTTP.Response(jsonify(body; allow_infnan=true)) end return handler end diff --git a/src/inf_nan.jl b/src/inf_nan.jl index 2816b59..8daa45d 100644 --- a/src/inf_nan.jl +++ b/src/inf_nan.jl @@ -17,4 +17,4 @@ function show_json(io::StructuralContext, ::NaNSerialization, f::AbstractFloat) end end -jsonify(object::Any; allow_infnan=true) = sprint(show_json, allow_infnan ? NaNSerialization() : StandardSerialization(), object) +jsonify(object::Any; allow_infnan=false) = sprint(show_json, allow_infnan ? NaNSerialization() : StandardSerialization(), object) From 3ea64394d4b5921ad2058be40a54f070c4d3cf92 Mon Sep 17 00:00:00 2001 From: Alan Date: Fri, 19 Sep 2025 19:43:28 +0200 Subject: [PATCH 4/6] update test model description --- test/runtests.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/runtests.jl b/test/runtests.jl index 8f40400..ca8db5c 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -214,7 +214,7 @@ end end -# Define model for 1D function f(x) = x^2 +# Define dummy model for Infinity and NaN values in input and output model_infnan = UMBridge.Model( name = "inf_nan", inputSizes = [4], From a80bbc6aecb1a1fc66ab3a6a7db66e2b134aeda6 Mon Sep 17 00:00:00 2001 From: Alan Date: Sat, 20 Sep 2025 10:48:10 +0200 Subject: [PATCH 5/6] fix tests, use allow_infnan flag --- test/runtests.jl | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/test/runtests.jl b/test/runtests.jl index ca8db5c..fa82229 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -17,8 +17,8 @@ function testserver_sizes(models) "name" => UMBridge.name(models[1]), "config" => Dict() ) - response_input = UMBridge.inputRequest(models)(HTTP.Request("POST", "/InputSizes", [], 
UMBridge.jsonify(body))) - response_output = UMBridge.outputRequest(models)(HTTP.Request("POST", "/OutputSizes", [], UMBridge.jsonify(body))) + response_input = UMBridge.inputRequest(models)(HTTP.Request("POST", "/InputSizes", [], UMBridge.jsonify(body; allow_infnan=true))) + response_output = UMBridge.outputRequest(models)(HTTP.Request("POST", "/OutputSizes", [], UMBridge.jsonify(body; allow_infnan=true))) all([response_input.status == 200, response_output.status == 200]) end @@ -27,7 +27,7 @@ function testserver_info(models) "name" => UMBridge.name(models[1]), "config" => Dict() ) - response_input = UMBridge.infoRequest(models)(HTTP.Request("GET", "/Info", [], UMBridge.jsonify(body))) + response_input = UMBridge.infoRequest(models)(HTTP.Request("GET", "/Info", [], UMBridge.jsonify(body; allow_infnan=true))) return response_input.status == 200 end @@ -38,7 +38,7 @@ function testserver_evaluate(models) "input" => [[1.0]], "config" => Dict() ) - response_input = UMBridge.evaluateRequest(models)(HTTP.Request("POST", "/Evaluate", [], UMBridge.jsonify(body))) + response_input = UMBridge.evaluateRequest(models)(HTTP.Request("POST", "/Evaluate", [], UMBridge.jsonify(body; allow_infnan=true))) return response_input.status == 200 end @@ -53,7 +53,7 @@ function testserver_gradient(models) "input" => [[1.0]], "config" => Dict() ) - response_input = UMBridge.gradientRequest(models)(HTTP.Request("POST", "/Gradient", [], UMBridge.jsonify(body))) + response_input = UMBridge.gradientRequest(models)(HTTP.Request("POST", "/Gradient", [], UMBridge.jsonify(body; allow_infnan=true))) return response_input.status == 200 end @@ -68,7 +68,7 @@ function testserver_jacobian(models) "vec" => [1.0], "config" => Dict() ) - response_input = UMBridge.applyJacobianRequest(models)(HTTP.Request("POST", "/ApplyJacobian", [], UMBridge.jsonify(body))) + response_input = UMBridge.applyJacobianRequest(models)(HTTP.Request("POST", "/ApplyJacobian", [], UMBridge.jsonify(body; allow_infnan=true))) return 
response_input.status == 200 end @@ -85,7 +85,7 @@ function testserver_hessian(models) "sens" => [1], "config" => Dict() ) - response_input = UMBridge.applyHessianRequest(models)(HTTP.Request("POST", "/ApplyHessian", [], UMBridge.jsonify(body))) + response_input = UMBridge.applyHessianRequest(models)(HTTP.Request("POST", "/ApplyHessian", [], UMBridge.jsonify(body; allow_infnan=true))) return response_input.status == 200 end @@ -164,7 +164,7 @@ function testserver_gradient_1D(models) ) # Make gradient request - response_input = UMBridge.gradientRequest(models)(HTTP.Request("POST", "/Gradient", [], UMBridge.jsonify(body))) + response_input = UMBridge.gradientRequest(models)(HTTP.Request("POST", "/Gradient", [], UMBridge.jsonify(body; allow_infnan=true))) expected_gradient = models[1].gradient(1, 1, input, sens, Dict()) # Verify the gradient application result @@ -200,7 +200,7 @@ function testserver_jacobian_2D(models) ) # Make jacobian request - response_input = UMBridge.applyJacobianRequest(models)(HTTP.Request("POST", "/ApplyJacobian", [], UMBridge.jsonify(body))) + response_input = UMBridge.applyJacobianRequest(models)(HTTP.Request("POST", "/ApplyJacobian", [], UMBridge.jsonify(body; allow_infnan=true))) expected_jacobian_application = models[1].applyJacobian(1, 1, input, vect, Dict()) @@ -214,7 +214,7 @@ end end -# Define dummy model for Infinity and NaN values in input and output +# Define model for 1D function f(x) = x^2 model_infnan = UMBridge.Model( name = "inf_nan", inputSizes = [4], @@ -233,7 +233,7 @@ function testserver_model_infnan(models) ) # Make evaluate request - response_input = UMBridge.evaluateRequest(models)(HTTP.Request("POST", "/Evaluate", [], UMBridge.jsonify(body))) + response_input = UMBridge.evaluateRequest(models)(HTTP.Request("POST", "/Evaluate", [], UMBridge.jsonify(body; allow_infnan=true))) expected_output = models[1].evaluate(input, Dict()) # Verify if Infinity and NaN values are handled correctly From 
be292525c1597044720b74af05e50d37d648db62 Mon Sep 17 00:00:00 2001 From: Alan Date: Sat, 20 Sep 2025 10:59:13 +0200 Subject: [PATCH 6/6] update test model description --- test/runtests.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/runtests.jl b/test/runtests.jl index fa82229..7c221ac 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -214,7 +214,7 @@ end end -# Define model for 1D function f(x) = x^2 +# Define dummy model for Inifinity and Nan values model_infnan = UMBridge.Model( name = "inf_nan", inputSizes = [4],