From 3df1a7fc1deb7ef7d8bebc982b3b90694ee484fb Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Fri, 23 Jan 2026 09:34:25 -0600 Subject: [PATCH 01/16] update base, readme, example --- README.md | 25 +++++++++++++------------ docs/examples/basic/xopt_basic.ipynb | 24 +++++++++++------------- xopt/base.py | 19 +++++++++++++++---- 3 files changed, 39 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index 3999d55f0..af42ee4e7 100644 --- a/README.md +++ b/README.md @@ -94,23 +94,24 @@ generator: population_size: 64 population_file: test.csv output_path: . + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: + y1: MINIMIZE + y2: MINIMIZE + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + constants: {a: dummy_constant} evaluator: function: my_function function_kwargs: my_arguments: 42 -vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: - y1: MINIMIZE - y2: MINIMIZE - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - constants: {a: dummy_constant} + stopping_condition: name: MaxEvaluationsCondition @@ -139,7 +140,7 @@ def sin_function(input_dict): # create Xopt evaluator, generator, and Xopt objects evaluator = Evaluator(function=sin_function) generator = UpperConfidenceBoundGenerator(vocs=vocs) -X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs) +X = Xopt(evaluator=evaluator, generator=generator) # call X.random_evaluate() to generate + evaluate 3 initial points X.random_evaluate(3) diff --git a/docs/examples/basic/xopt_basic.ipynb b/docs/examples/basic/xopt_basic.ipynb index c34539789..0b104e940 100644 --- a/docs/examples/basic/xopt_basic.ipynb +++ b/docs/examples/basic/xopt_basic.ipynb @@ -32,14 +32,13 @@ }, "source": [ "## Xopt Components\n", - "The definition of the Xopt object requires 3 parts, listed below:\n", + "The definition of the Xopt object requires 2 parts, listed below:\n", "- The `Evaluator` object, which evaluates input points using the arbitrary function\n", 
"specified by the `function` property.\n", "- The `Generator` object, which, when given data that has been evaluated, generates\n", "future points to evaluate using the evaluator.\n", - "- The `VOCS` (variables, objectives, constraints, statics) object, which specifies the\n", - "input domain, the objectives, constraints and constants passed to the evaluator\n", - "function.\n" + " - The `VOCS` (variables, objectives, constraints, statics) object, which specifies the input domain, the objectives, constraints and constants passed to the evaluator\n", + " function.\n" ] }, { @@ -224,7 +223,7 @@ }, "outputs": [], "source": [ - "X = Xopt(vocs=vocs, generator=generator, evaluator=evaluator)" + "X = Xopt(generator=generator, evaluator=evaluator)" ] }, { @@ -259,14 +258,13 @@ "\n", "generator:\n", " name: random\n", - "\n", - "vocs:\n", - " variables:\n", - " x1: [0, 3.14159]\n", - " x2: [0, 3.14159]\n", - " objectives: {f: MINIMIZE}\n", - " constraints:\n", - " g: [LESS_THAN, 0]\n", + " vocs:\n", + " variables:\n", + " x1: [0, 3.14159]\n", + " x2: [0, 3.14159]\n", + " objectives: {f: MINIMIZE}\n", + " constraints:\n", + " g: [LESS_THAN, 0]\n", "\n", "\"\"\"" ] diff --git a/xopt/base.py b/xopt/base.py index 217bd9daf..230c39d77 100644 --- a/xopt/base.py +++ b/xopt/base.py @@ -41,9 +41,6 @@ class Xopt(XoptBaseModel): Parameters ---------- - vocs : VOCS - VOCS object for defining the problem's variables, objectives, constraints, and - statics. generator : SerializeAsAny[Generator] An object responsible for generating candidates for optimization. evaluator : SerializeAsAny[Evaluator] @@ -96,7 +93,6 @@ class Xopt(XoptBaseModel): Serializes the Xopt configuration to a JSON string. 
""" - vocs: VOCS = Field(description="VOCS object for Xopt") generator: Union[SerializeAsAny[Generator], Any] = Field( description="generator object for Xopt" ) @@ -207,6 +203,21 @@ def n_data(self) -> int: return 0 else: return len(self.data) + + @property + def vocs(self) -> VOCS: + """ + Get the VOCS object from the generator. + + Returns + ------- + VOCS + The VOCS object associated with the generator. + """ + if self.generator is None: + raise ValueError("generator is not set") + + return self.generator.vocs def __init__(self, *args, **kwargs): """ From c19e6f6e6027c71a167943358d8116df9c307787 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 11:23:35 -0600 Subject: [PATCH 02/16] fix tests --- xopt/base.py | 17 +- xopt/generator.py | 2 +- xopt/resources/testing.py | 21 +- xopt/tests/generators/bayesian/test_bax.py | 6 +- .../generators/bayesian/test_bax_visualize.py | 4 +- .../bayesian/test_bayesian_exploration.py | 6 +- .../bayesian/test_bayesian_generator.py | 2 +- .../bayesian/test_expected_improvement.py | 2 +- .../generators/bayesian/test_high_level.py | 66 ++-- xopt/tests/generators/bayesian/test_mggpo.py | 6 +- xopt/tests/generators/bayesian/test_mobo.py | 4 +- xopt/tests/generators/bayesian/test_turbo.py | 5 +- .../bayesian/test_upper_confidence_bound.py | 22 +- xopt/tests/generators/bayesian/test_utils.py | 8 +- .../generators/bayesian/test_visualize.py | 2 +- xopt/tests/generators/external/test_aposmm.py | 4 - xopt/tests/generators/ga/test_cnsga.py | 37 +- xopt/tests/generators/ga/test_nsga2.py | 336 +++++++++--------- .../sequential/test_extremum_seeking.py | 4 +- .../generators/sequential/test_neldermead.py | 35 +- xopt/tests/generators/sequential/test_rcds.py | 17 +- .../sequential/test_serialization.py | 2 +- xopt/tests/test_evaluator.py | 2 +- xopt/tests/test_io.py | 4 +- xopt/tests/test_numerical_optimizer.py | 2 +- xopt/tests/test_perf.py | 2 +- xopt/tests/test_stopping_condition.py | 2 - xopt/tests/test_xopt.py | 76 ++-- 28 
files changed, 329 insertions(+), 367 deletions(-) diff --git a/xopt/base.py b/xopt/base.py index 230c39d77..4aac5633c 100644 --- a/xopt/base.py +++ b/xopt/base.py @@ -130,26 +130,13 @@ def validate_model(cls, data: Any): Validate the Xopt model by checking the generator and evaluator. """ if isinstance(data, dict): - # validate vocs - if isinstance(data["vocs"], dict): - data["vocs"] = VOCS(**data["vocs"]) - # validate generator if isinstance(data["generator"], dict): name = data["generator"].pop("name") generator_class = get_generator(name) - data["generator"] = generator_class.model_validate( - {**data["generator"], "vocs": data["vocs"]} - ) - elif isinstance(data["generator"], str): - generator_class = get_generator(data["generator"]) - - data["generator"] = generator_class.model_validate( - {"vocs": data["vocs"]} - ) + data["generator"] = generator_class.model_validate(data["generator"]) # make a copy of the generator / vocs objects to avoid modifying the original - data["vocs"] = deepcopy(data["vocs"]) data["generator"] = deepcopy(data["generator"]) return data @@ -203,7 +190,7 @@ def n_data(self) -> int: return 0 else: return len(self.data) - + @property def vocs(self) -> VOCS: """ diff --git a/xopt/generator.py b/xopt/generator.py index 6b93bed44..d6191e772 100644 --- a/xopt/generator.py +++ b/xopt/generator.py @@ -70,7 +70,7 @@ class Generator(XoptBaseModel, BaseGenerator, ABC): exclude=True, ) - vocs: VOCS = Field(description="generator VOCS", exclude=True) + vocs: VOCS = Field(description="generator VOCS") data: Optional[pd.DataFrame] = Field( None, description="generator data", exclude=True ) diff --git a/xopt/resources/testing.py b/xopt/resources/testing.py index 03f2903f0..cc4133429 100644 --- a/xopt/resources/testing.py +++ b/xopt/resources/testing.py @@ -310,20 +310,19 @@ def set_options(gen, use_cuda=False, add_data=False): TEST_YAML = """ generator: name: random + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: 
MINIMIZE, y2: MINIMIZE} + constraints: + c1: [GREATER_THAN, 0] + c2: ['LESS_THAN', 0.5] + constants: + constant1: 1 evaluator: function: xopt.resources.testing.xtest_callable function_kwargs: a: 5 - -vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: - c1: [GREATER_THAN, 0] - c2: ['LESS_THAN', 0.5] - constants: - constant1: 1 """ diff --git a/xopt/tests/generators/bayesian/test_bax.py b/xopt/tests/generators/bayesian/test_bax.py index cc4541da9..f79a67a9c 100644 --- a/xopt/tests/generators/bayesian/test_bax.py +++ b/xopt/tests/generators/bayesian/test_bax.py @@ -230,7 +230,7 @@ def test_in_xopt(self): ) gen.numerical_optimizer.n_restarts = 1 - xopt = Xopt(generator=gen, evaluator=evaluator, vocs=test_vocs) + xopt = Xopt(generator=gen, evaluator=evaluator) # initialize with single initial candidate xopt.random_evaluate(3) @@ -246,7 +246,7 @@ def test_file_saving(self): gen = BaxGenerator(vocs=test_vocs, algorithm=alg, algorithm_results_file="test") gen.numerical_optimizer.n_restarts = 1 - xopt = Xopt(generator=gen, evaluator=evaluator, vocs=test_vocs) + xopt = Xopt(generator=gen, evaluator=evaluator) # initialize with single initial candidate xopt.random_evaluate(3) @@ -325,7 +325,7 @@ def test_visualization(self): gen = BaxGenerator(vocs=test_vocs, algorithm=alg, n_monte_carlo_samples=10) gen.numerical_optimizer.n_restarts = 1 - xopt = Xopt(generator=gen, evaluator=evaluator, vocs=test_vocs) + xopt = Xopt(generator=gen, evaluator=evaluator) # initialize with single initial candidate xopt.random_evaluate(3) diff --git a/xopt/tests/generators/bayesian/test_bax_visualize.py b/xopt/tests/generators/bayesian/test_bax_visualize.py index ccef0f0cf..d4ff02151 100644 --- a/xopt/tests/generators/bayesian/test_bax_visualize.py +++ b/xopt/tests/generators/bayesian/test_bax_visualize.py @@ -28,7 +28,7 @@ def bax_generator(self): gen.numerical_optimizer.n_restarts = 1 - xopt = Xopt(generator=gen, 
evaluator=evaluator, vocs=test_vocs) + xopt = Xopt(generator=gen, evaluator=evaluator) xopt.random_evaluate(3) xopt.step() @@ -78,7 +78,7 @@ def test_unsupported_dim_x(self, bax_generator): gen.numerical_optimizer.n_restarts = 1 - xopt = Xopt(generator=gen, evaluator=evaluator, vocs=test_vocs) + xopt = Xopt(generator=gen, evaluator=evaluator) # initialize with single initial candidate xopt.random_evaluate(3) diff --git a/xopt/tests/generators/bayesian/test_bayesian_exploration.py b/xopt/tests/generators/bayesian/test_bayesian_exploration.py index ca881ead0..bcefc73e0 100644 --- a/xopt/tests/generators/bayesian/test_bayesian_exploration.py +++ b/xopt/tests/generators/bayesian/test_bayesian_exploration.py @@ -74,7 +74,7 @@ def test_in_xopt(self): gen.n_monte_carlo_samples = 1 gen.data = TEST_VOCS_DATA_MO - X = Xopt(generator=gen, evaluator=evaluator, vocs=TEST_VOCS_BASE) + X = Xopt(generator=gen, evaluator=evaluator) # now use bayes opt X.step() @@ -92,7 +92,7 @@ def test_with_turbo(self, use_cuda): ) set_options(gen, use_cuda, add_data=True) - X = Xopt(generator=gen, evaluator=evaluator, vocs=TEST_VOCS_BASE) + X = Xopt(generator=gen, evaluator=evaluator) # now use bayes opt X.step() @@ -109,7 +109,7 @@ def test_interpolation(self, use_cuda): set_options(gen, use_cuda) gen.n_interpolate_points = 5 - X = Xopt(generator=gen, evaluator=evaluator, vocs=TEST_VOCS_BASE) + X = Xopt(generator=gen, evaluator=evaluator) X.add_data(TEST_VOCS_DATA_MO) # now use bayes opt diff --git a/xopt/tests/generators/bayesian/test_bayesian_generator.py b/xopt/tests/generators/bayesian/test_bayesian_generator.py index ae4c7104f..0d1e61fbf 100644 --- a/xopt/tests/generators/bayesian/test_bayesian_generator.py +++ b/xopt/tests/generators/bayesian/test_bayesian_generator.py @@ -260,7 +260,7 @@ def test_get_model_w_conditions(self): def test_transforms(self): gen = PatchBayesianGenerator(vocs=sinusoid_vocs) evaluator = Evaluator(function=evaluate_sinusoid) - X = Xopt(generator=gen, 
evaluator=evaluator, vocs=sinusoid_vocs) + X = Xopt(generator=gen, evaluator=evaluator) # generate some data samples import numpy as np diff --git a/xopt/tests/generators/bayesian/test_expected_improvement.py b/xopt/tests/generators/bayesian/test_expected_improvement.py index f5656c270..166cfa3f5 100644 --- a/xopt/tests/generators/bayesian/test_expected_improvement.py +++ b/xopt/tests/generators/bayesian/test_expected_improvement.py @@ -72,7 +72,7 @@ def test_in_xopt(self): ) set_options(gen) - xopt = Xopt(generator=gen, evaluator=evaluator, vocs=TEST_VOCS_BASE) + xopt = Xopt(generator=gen, evaluator=evaluator) # initialize with single initial candidate xopt.random_evaluate(3) diff --git a/xopt/tests/generators/bayesian/test_high_level.py b/xopt/tests/generators/bayesian/test_high_level.py index 7ebac5d2f..617b9b775 100644 --- a/xopt/tests/generators/bayesian/test_high_level.py +++ b/xopt/tests/generators/bayesian/test_high_level.py @@ -49,18 +49,17 @@ def test_constrained_mobo(self): numerical_optimizer: name: LBFGS n_restarts: 1 + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] """ X = Xopt.from_yaml(YAML) X.random_evaluate(3) # generates random data @@ -76,14 +75,14 @@ def test_mobo(self): numerical_optimizer: name: LBFGS n_restarts: 2 + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: {} evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: {} """ X = Xopt.from_yaml(YAML) X.random_evaluate(3) # generates random data @@ -101,14 
+100,14 @@ def test_restart_torch_inline_serialization(self): numerical_optimizer: name: LBFGS n_restarts: 1 + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: {} evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: {} """ X = Xopt.from_yaml(YAML) X.random_evaluate(3) @@ -146,14 +145,14 @@ def test_restart_torch_serialization(self): numerical_optimizer: name: LBFGS n_restarts: 1 + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: {} evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: {} """ X = Xopt.from_yaml(YAML) X.random_evaluate(3) @@ -187,16 +186,15 @@ def test_restart(self): numerical_optimizer: name: LBFGS n_restarts: 1 + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: {} evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: {} """ X = Xopt.from_yaml(YAML) X.random_evaluate(3) diff --git a/xopt/tests/generators/bayesian/test_mggpo.py b/xopt/tests/generators/bayesian/test_mggpo.py index 0f581d406..648179694 100644 --- a/xopt/tests/generators/bayesian/test_mggpo.py +++ b/xopt/tests/generators/bayesian/test_mggpo.py @@ -67,7 +67,7 @@ def test_serial(self): vocs = deepcopy(tnk_vocs) reference_point = {"y1": 3.14, "y2": 3.14} gen = MGGPOGenerator(vocs=vocs, reference_point=reference_point) - X = Xopt(evaluator=evaluator, generator=gen, vocs=vocs) + X = Xopt(evaluator=evaluator, generator=gen) X.evaluate_data(pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]})) samples = 
X.generator.generate(10) assert pd.DataFrame(samples).to_numpy().shape == (10, 2) @@ -83,8 +83,8 @@ def test_bactched(self): reference_point = {"y1": 3.14, "y2": 3.14} gen = MGGPOGenerator(vocs=vocs, reference_point=reference_point) - X = Xopt(evaluator=evaluator, generator=gen, vocs=vocs) - X.evaluate_data(pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]})) + X = Xopt(evaluator=evaluator, generator=gen) + X.evaluate_data(pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]})) for _ in [0, 1]: X.step() diff --git a/xopt/tests/generators/bayesian/test_mobo.py b/xopt/tests/generators/bayesian/test_mobo.py index 5b17f1e05..67ee5cf26 100644 --- a/xopt/tests/generators/bayesian/test_mobo.py +++ b/xopt/tests/generators/bayesian/test_mobo.py @@ -123,7 +123,7 @@ def test_script(self): for ele in [gen]: dump = ele.model_dump() generator = MOBOGenerator(vocs=tnk_vocs, **dump) - X = Xopt(generator=generator, evaluator=evaluator, vocs=tnk_vocs) + X = Xopt(generator=generator, evaluator=evaluator) X.random_evaluate(3) X.step() @@ -325,7 +325,7 @@ def test_log_mobo(self, use_cuda): for ele in [gen]: dump = ele.model_dump() generator = MOBOGenerator(vocs=tnk_vocs, **dump) - X = Xopt(generator=generator, evaluator=evaluator, vocs=tnk_vocs) + X = Xopt(generator=generator, evaluator=evaluator) X.generator.numerical_optimizer.max_iter = 1 X.random_evaluate(3) X.step() diff --git a/xopt/tests/generators/bayesian/test_turbo.py b/xopt/tests/generators/bayesian/test_turbo.py index 31aed961d..38a937a1f 100644 --- a/xopt/tests/generators/bayesian/test_turbo.py +++ b/xopt/tests/generators/bayesian/test_turbo.py @@ -413,7 +413,7 @@ def sin_function(input_dict): generator = UpperConfidenceBoundGenerator( vocs=vocs, turbo_controller="optimize" ) - X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs) + X = Xopt(evaluator=evaluator, generator=generator) X.evaluate_data(pd.DataFrame({"x": [3.0, 1.75, 2.0]})) @@ -489,7 +489,6 @@ def test_serialization(self): X = Xopt( 
evaluator=evaluator, generator=generator, - vocs=vocs, dump_file="dump.yml", ) @@ -534,7 +533,7 @@ def basic_sin_function(input_dict): evaluator = Evaluator(function=basic_sin_function) # construct Xopt optimizer - X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs) + X = Xopt(evaluator=evaluator, generator=generator) X.random_evaluate(3) diff --git a/xopt/tests/generators/bayesian/test_upper_confidence_bound.py b/xopt/tests/generators/bayesian/test_upper_confidence_bound.py index eab867733..e7a7a522f 100644 --- a/xopt/tests/generators/bayesian/test_upper_confidence_bound.py +++ b/xopt/tests/generators/bayesian/test_upper_confidence_bound.py @@ -97,7 +97,7 @@ def test_get_optimum(self, use_cuda): ) set_options(gen, use_cuda=False, add_data=True) evaluator = Evaluator(function=xtest_callable) - X = Xopt(generator=gen, evaluator=evaluator, vocs=vocs) + X = Xopt(generator=gen, evaluator=evaluator) X.random_evaluate(10) for _ in range(1): X.step() @@ -121,7 +121,7 @@ def test_in_xopt(self): ) set_options(gen) - X = Xopt(generator=gen, evaluator=evaluator, vocs=TEST_VOCS_BASE) + X = Xopt(generator=gen, evaluator=evaluator) X.random_evaluate(5) for _ in range(2): X.step() @@ -165,18 +165,18 @@ def test_negative_acq_values_warning(self): X = Xopt.from_yaml( """ generator: - name: upper_confidence_bound - + name: upper_confidence_bound + vocs: + variables: + x1: [0, 6.28] + constraints: + c1: [LESS_THAN, 0.0] + objectives: + y1: 'MAXIMIZE' evaluator: function: xopt.resources.test_functions.sinusoid_1d.evaluate_sinusoid - vocs: - variables: - x1: [0, 6.28] - constraints: - c1: [LESS_THAN, 0.0] - objectives: - y1: 'MAXIMIZE' + """ ) _ = X.random_evaluate(10, seed=0) diff --git a/xopt/tests/generators/bayesian/test_utils.py b/xopt/tests/generators/bayesian/test_utils.py index 8cf07e23f..96abb8bcd 100644 --- a/xopt/tests/generators/bayesian/test_utils.py +++ b/xopt/tests/generators/bayesian/test_utils.py @@ -105,7 +105,7 @@ def test_model_jit(self, use_cuda): 
vocs=vocs, ) gen.use_cuda = use_cuda - X = Xopt(generator=gen, evaluator=evaluator, vocs=vocs) + X = Xopt(generator=gen, evaluator=evaluator) gen = X.generator X.random_evaluate(200) gen.train_model() @@ -173,7 +173,7 @@ def test_model_compile(self, use_cuda): gen.use_cuda = use_cuda gen.numerical_optimizer.n_restarts = 2 gen.n_monte_carlo_samples = 4 - X = Xopt(generator=gen, evaluator=evaluator, vocs=TEST_VOCS_BASE) + X = Xopt(generator=gen, evaluator=evaluator) X.random_evaluate(100) for _ in range(1): X.step() @@ -283,7 +283,7 @@ def test_acqf_compile(self, use_cuda): gen.use_cuda = use_cuda gen.numerical_optimizer.n_restarts = 3 gen.n_monte_carlo_samples = 4 - X = Xopt(generator=gen, evaluator=evaluator, vocs=vocs) + X = Xopt(generator=gen, evaluator=evaluator) X.random_evaluate(200) for _ in range(1): X.step() @@ -397,7 +397,7 @@ def test_torch_trace_acqf(self): ) gen.numerical_optimizer.n_restarts = 2 gen.n_monte_carlo_samples = 4 - X = Xopt(generator=gen, evaluator=evaluator, vocs=TEST_VOCS_BASE) + X = Xopt(generator=gen, evaluator=evaluator) X.random_evaluate(100) for _ in range(1): X.step() diff --git a/xopt/tests/generators/bayesian/test_visualize.py b/xopt/tests/generators/bayesian/test_visualize.py index 14bd04dad..6ac65f175 100644 --- a/xopt/tests/generators/bayesian/test_visualize.py +++ b/xopt/tests/generators/bayesian/test_visualize.py @@ -272,7 +272,7 @@ def test_in_generator(): generator.numerical_optimizer.max_iter = 1 generator.gp_constructor.use_low_noise_prior = True - X = Xopt(generator=generator, evaluator=evaluator, vocs=tnk_vocs) + X = Xopt(generator=generator, evaluator=evaluator) with pytest.raises(ValueError): X.generator.visualize_model() diff --git a/xopt/tests/generators/external/test_aposmm.py b/xopt/tests/generators/external/test_aposmm.py index 63d9799a8..881a37519 100644 --- a/xopt/tests/generators/external/test_aposmm.py +++ b/xopt/tests/generators/external/test_aposmm.py @@ -64,7 +64,6 @@ def test_init(self, vocs, evaluator, 
max_evaluations, mapping): generator=gen, evaluator=evaluator, stopping_condition=max_evaluations, - vocs=vocs, ) def test_run(self, vocs, evaluator, max_evaluations, mapping): @@ -77,7 +76,6 @@ def test_run(self, vocs, evaluator, max_evaluations, mapping): x = Xopt( generator=gen, evaluator=evaluator, - vocs=vocs, stopping_condition=max_evaluations, ) x.run() @@ -96,7 +94,6 @@ def test_random_evaluate(self, vocs, evaluator, max_evaluations, mapping): x = Xopt( generator=gen, evaluator=evaluator, - vocs=vocs, stopping_condition=max_evaluations, ) x.random_evaluate(40) @@ -117,7 +114,6 @@ def test_step(self, vocs, evaluator, max_evaluations, mapping): x = Xopt( generator=gen, evaluator=evaluator, - vocs=vocs, stopping_condition=max_evaluations, ) for i in range(45): diff --git a/xopt/tests/generators/ga/test_cnsga.py b/xopt/tests/generators/ga/test_cnsga.py index 1072862c2..aea06c678 100644 --- a/xopt/tests/generators/ga/test_cnsga.py +++ b/xopt/tests/generators/ga/test_cnsga.py @@ -21,7 +21,6 @@ def test_cnsga(): X = Xopt( generator=CNSGAGenerator(vocs=tnk_vocs), evaluator=Evaluator(function=evaluate_TNK), - vocs=tnk_vocs, stopping_condition=MaxEvaluationsCondition(max_evaluations=10), ) X.run() @@ -34,7 +33,6 @@ def test_cnsga_single_objective(): X = Xopt( generator=CNSGAGenerator(vocs=modified_tnk_vocs), evaluator=Evaluator(function=evaluate_modified_TNK), - vocs=modified_tnk_vocs, stopping_condition=MaxEvaluationsCondition(max_evaluations=5), ) X.run() @@ -49,7 +47,6 @@ def eval_f(x): X = Xopt( generator=CNSGAGenerator(vocs=tnk_vocs, population_size=32), evaluator=Evaluator(function=eval_f), - vocs=tnk_vocs, strict=False, ) @@ -92,6 +89,15 @@ def test_cnsga_from_yaml(): name: cnsga population_size: 8 population_file: null + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + constants: {a: dummy_constant} evaluator: function: 
xopt.resources.test_functions.tnk.evaluate_TNK @@ -99,15 +105,7 @@ def test_cnsga_from_yaml(): sleep: 0 random_sleep: 0.1 - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - constants: {a: dummy_constant} + """ X = Xopt(YAML) @@ -128,6 +126,13 @@ def test_cnsga_no_constraints(): name: cnsga population_size: 8 population_file: null + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: {} + constants: {a: dummy_constant} evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK @@ -135,13 +140,7 @@ def test_cnsga_no_constraints(): sleep: 0 random_sleep: 0.1 - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: {} - constants: {a: dummy_constant} + """ X = Xopt(YAML) diff --git a/xopt/tests/generators/ga/test_nsga2.py b/xopt/tests/generators/ga/test_nsga2.py index 60e9353c2..4ab02b823 100644 --- a/xopt/tests/generators/ga/test_nsga2.py +++ b/xopt/tests/generators/ga/test_nsga2.py @@ -33,7 +33,6 @@ def test_nsga2(): X = Xopt( generator=NSGA2Generator(vocs=tnk_vocs), evaluator=Evaluator(function=evaluate_TNK), - vocs=tnk_vocs, stopping_condition=MaxEvaluationsCondition(max_evaluations=5), ) X.run() @@ -46,7 +45,6 @@ def test_nsga2_single_objective(): X = Xopt( generator=NSGA2Generator(vocs=modified_tnk_vocs), evaluator=Evaluator(function=evaluate_modified_TNK), - vocs=modified_tnk_vocs, stopping_condition=MaxEvaluationsCondition(max_evaluations=5), ) @@ -69,7 +67,6 @@ def test_nsga2_output_data(): X = Xopt( generator=generator, evaluator=Evaluator(function=evaluate_TNK), - vocs=tnk_vocs, stopping_condition=MaxEvaluationsCondition( max_evaluations=30 ), # Run for 3 generations @@ -234,7 +231,6 @@ def nsga2_optimization_with_checkpoint(): X = Xopt( generator=generator, evaluator=Evaluator(function=evaluate_TNK), - vocs=vocs, 
stopping_condition=MaxEvaluationsCondition( max_evaluations=20 ), # Run for 2 generations @@ -283,7 +279,6 @@ def test_nsga2_checkpoint_reload_python(nsga2_optimization_with_checkpoint): X_restored = Xopt( generator=restored_generator, evaluator=Evaluator(function=evaluate_TNK), - vocs=tnk_vocs, stopping_condition=MaxEvaluationsCondition( max_evaluations=10 ), # Run for 1 more generation @@ -315,27 +310,28 @@ def test_nsga2_checkpoint_reload_yaml(nsga2_optimization_with_checkpoint): max_evaluations: 20 generator: - name: nsga2 - checkpoint_file: {latest_checkpoint} + name: nsga2 + checkpoint_file: {latest_checkpoint} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] - evaluator: - function: xopt.resources.test_functions.tnk.evaluate_TNK + objectives: + y1: MINIMIZE + y2: MINIMIZE - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] - objectives: - y1: MINIMIZE - y2: MINIMIZE + constants: + a: dummy_constant + + evaluator: + function: xopt.resources.test_functions.tnk.evaluate_TNK - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - constants: - a: dummy_constant """.replace("\n ", "\n") # Reload from YAML, grab generator @@ -391,24 +387,25 @@ def test_nsga2_checkpoint_reload_vocs_var_bounds_expand( my_xopt = Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [-10.0, 10.0] + x2: [-10.0, 10.0] - evaluator: - function: xopt.resources.test_functions.tnk.evaluate_TNK + objectives: + y1: MINIMIZE + y2: MINIMIZE - vocs: - variables: - x1: [-10.0, 10.0] - x2: [-10.0, 10.0] + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + + evaluator: + function: xopt.resources.test_functions.tnk.evaluate_TNK - objectives: - y1: MINIMIZE - y2: MINIMIZE - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] """.replace("\n ", "\n") 
) @@ -427,24 +424,25 @@ def test_nsga2_checkpoint_reload_vocs_var_bounds_shrink( my_xopt = Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [-10.0, -5.0] + x2: [-10.0, -5.0] + + objectives: + y1: MINIMIZE + y2: MINIMIZE + + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [-10.0, -5.0] - x2: [-10.0, -5.0] - objectives: - y1: MINIMIZE - y2: MINIMIZE - - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] """.replace("\n ", "\n") ) @@ -457,24 +455,24 @@ def test_nsga2_checkpoint_reload_vocs_obj_dir(nsga2_optimization_with_checkpoint Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: + y1: MAXIMIZE + y2: MAXIMIZE + + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - - objectives: - y1: MAXIMIZE - y2: MAXIMIZE - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] """.replace("\n ", "\n") ) @@ -485,24 +483,24 @@ def test_nsga2_checkpoint_reload_vocs_constraint_conf( Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: + y1: MINIMIZE + y2: MINIMIZE + + constraints: + c1: [LESS_THAN, 0.123] + c2: [GREATER_THAN, 0.321] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - - objectives: - y1: MINIMIZE 
- y2: MINIMIZE - constraints: - c1: [LESS_THAN, 0.123] - c2: [GREATER_THAN, 0.321] """.replace("\n ", "\n") ) @@ -511,25 +509,25 @@ def test_nsga2_checkpoint_reload_vocs_new_var(nsga2_optimization_with_checkpoint Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} - + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + my_const1: [0.0, 1.0] + + objectives: + y1: MINIMIZE + y2: MINIMIZE + + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - my_const1: [0.0, 1.0] - - objectives: - y1: MINIMIZE - y2: MINIMIZE - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] """.replace("\n ", "\n") ) @@ -538,25 +536,25 @@ def test_nsga2_checkpoint_reload_vocs_new_obj(nsga2_optimization_with_checkpoint Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} - + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + + objectives: + y1: MINIMIZE + y2: MINIMIZE + my_const1: MINIMIZE + + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: - y1: MINIMIZE - y2: MINIMIZE - my_const1: MINIMIZE - - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] """.replace("\n ", "\n") ) @@ -565,25 +563,25 @@ def test_nsga2_checkpoint_reload_vocs_new_const(nsga2_optimization_with_checkpoi Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} - + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + + objectives: + y1: 
MINIMIZE + y2: MINIMIZE + + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + my_const1: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: - y1: MINIMIZE - y2: MINIMIZE - - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - my_const1: [LESS_THAN, 0.5] """.replace("\n ", "\n") ) @@ -593,25 +591,25 @@ def test_nsga2_checkpoint_reload_vocs_bad_var(nsga2_optimization_with_checkpoint Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} - + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + does_not_exist: [0.0, 1.0] + + objectives: + y1: MINIMIZE + y2: MINIMIZE + + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - does_not_exist: [0.0, 1.0] - - objectives: - y1: MINIMIZE - y2: MINIMIZE - - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] + """.replace("\n ", "\n") ) @@ -621,25 +619,25 @@ def test_nsga2_checkpoint_reload_vocs_bad_obj(nsga2_optimization_with_checkpoint Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} - + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + + objectives: + y1: MINIMIZE + y2: MINIMIZE + does_not_exist: MINIMIZE + + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - - objectives: - y1: MINIMIZE - y2: MINIMIZE - does_not_exist: MINIMIZE - - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] + """.replace("\n ", "\n") ) @@ -649,25 +647,25 @@ def 
test_nsga2_checkpoint_reload_vocs_bad_const(nsga2_optimization_with_checkpoi Xopt.from_yaml( f""" generator: - name: nsga2 - checkpoint_file: {nsga2_optimization_with_checkpoint[1]} - + name: nsga2 + checkpoint_file: {nsga2_optimization_with_checkpoint[1]} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + + objectives: + y1: MINIMIZE + y2: MINIMIZE + + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + does_not_exist: [LESS_THAN, 0.5] evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - - objectives: - y1: MINIMIZE - y2: MINIMIZE - - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - does_not_exist: [LESS_THAN, 0.5] + """.replace("\n ", "\n") ) @@ -688,7 +686,6 @@ def test_nsga2_all_individuals_in_data(): X = Xopt( generator=generator, evaluator=Evaluator(function=evaluate_TNK, max_workers=1), - vocs=tnk_vocs, ) for _ in range(30): X.step() @@ -762,7 +759,6 @@ def compare(val_a, val_b): population_size=pop_size, ), evaluator=Evaluator(function=problem_func), - vocs=problem_vocs, ) # Run the first step to initialize @@ -905,7 +901,6 @@ def test_nsga2_output_inhomogenous_data(): X = Xopt( generator=generator, evaluator=Evaluator(function=evaluate_TNK), - vocs=tnk_vocs, stopping_condition=MaxEvaluationsCondition( max_evaluations=30 ), # Run for 3 generations @@ -961,7 +956,6 @@ def test_nsga2_vocs_not_present_in_add_data(): X = Xopt( generator=NSGA2Generator(vocs=tnk_vocs), evaluator=Evaluator(function=evaluate_TNK), - vocs=tnk_vocs, stopping_condition=MaxEvaluationsCondition(max_evaluations=10), ) X.run() diff --git a/xopt/tests/generators/sequential/test_extremum_seeking.py b/xopt/tests/generators/sequential/test_extremum_seeking.py index d7ceb0581..5de9a9e99 100644 --- a/xopt/tests/generators/sequential/test_extremum_seeking.py +++ b/xopt/tests/generators/sequential/test_extremum_seeking.py @@ -267,7 +267,7 @@ def f_ES_minimize(input_dict): evaluator = 
Evaluator(function=f_ES_minimize) generator = ExtremumSeekingGenerator(vocs=vocs) - X = Xopt(vocs=vocs, evaluator=evaluator, generator=generator) + X = Xopt(evaluator=evaluator, generator=generator) X.evaluate_data({name: val for name, val in zip(vocs.variable_names, pES[0])}) for i in range(ES_steps): @@ -288,7 +288,7 @@ def eval_f(x): evaluator = Evaluator(function=eval_f) generator = ExtremumSeekingGenerator(vocs=vocs) for ele in [-1.0, 1.0]: - X = Xopt(vocs=vocs, evaluator=evaluator, generator=generator) + X = Xopt(evaluator=evaluator, generator=generator) X.evaluate_data( {name: val for name, val in zip(vocs.variable_names, ele * np.ones(3))} diff --git a/xopt/tests/generators/sequential/test_neldermead.py b/xopt/tests/generators/sequential/test_neldermead.py index c611fc941..11f21152e 100644 --- a/xopt/tests/generators/sequential/test_neldermead.py +++ b/xopt/tests/generators/sequential/test_neldermead.py @@ -54,14 +54,15 @@ def test_simplex_generate(self): generator: name: neldermead adaptive: true + vocs: + variables: + x0: [-5, 5] + x1: [-5, 5] + x2: [-5, 5] + objectives: {y: MINIMIZE} evaluator: function: xopt.resources.test_functions.rosenbrock.evaluate_rosenbrock - vocs: - variables: - x0: [-5, 5] - x1: [-5, 5] - x2: [-5, 5] - objectives: {y: MINIMIZE} + """ X = Xopt.from_yaml(YAML) X.random_evaluate(1) @@ -106,13 +107,14 @@ def test_simplex_forced_init(self): name: neldermead initial_point: {x0: -1, x1: -1} adaptive: true + vocs: + variables: + x0: [-5, 5] + x1: [-5, 5] + objectives: {y: MINIMIZE} evaluator: function: xopt.resources.test_functions.rosenbrock.evaluate_rosenbrock - vocs: - variables: - x0: [-5, 5] - x1: [-5, 5] - objectives: {y: MINIMIZE} + """ # test where we first random evaluate a point before starting simplex -- simplex will still start with the initial point @@ -203,13 +205,14 @@ def test_simplex_agreement(self): name: neldermead initial_point: {x0: -1, x1: -1} adaptive: true + vocs: + variables: + x0: [-5, 5] + x1: [-5, 5] + 
objectives: {y: MINIMIZE} evaluator: function: xopt.resources.test_functions.rosenbrock.evaluate_rosenbrock - vocs: - variables: - x0: [-5, 5] - x1: [-5, 5] - objectives: {y: MINIMIZE} + """ X = Xopt.from_yaml(YAML) X.run() diff --git a/xopt/tests/generators/sequential/test_rcds.py b/xopt/tests/generators/sequential/test_rcds.py index eac61a9d8..559bbeb6c 100644 --- a/xopt/tests/generators/sequential/test_rcds.py +++ b/xopt/tests/generators/sequential/test_rcds.py @@ -75,14 +75,15 @@ def test_rcds_yaml(self): init_mat: null noise: 0.00001 step: 0.01 + vocs: + variables: + x1: [0, 1] + x2: [0, 1] + objectives: + y1: MINIMIZE evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK - vocs: - variables: - x1: [0, 1] - x2: [0, 1] - objectives: - y1: MINIMIZE + """ X = Xopt.from_yaml(YAML) X.random_evaluate(1) @@ -111,7 +112,7 @@ def test_rcds_convergence(self, fun, obj, x_opt, max_iter): vocs = VOCS(variables=variables, objectives=objectives) generator = RCDSGenerator(step=0.01, noise=0.00001, vocs=vocs) evaluator = Evaluator(function=fun) - X = Xopt(vocs=vocs, evaluator=evaluator, generator=generator) + X = Xopt(evaluator=evaluator, generator=generator) if x_opt.sum(): # if the optimal solution is not 0 X.evaluate_data({f"x{i}": 1.2 for i in range(len(x_opt))}) @@ -297,7 +298,7 @@ def test_rcds_nan_handling(self): test_vocs.objectives = {"y1": "MINIMIZE"} generator = RCDSGenerator(step=0.01, noise=0.00001, vocs=test_vocs) evaluator = Evaluator(function=eval_f_linear_pos_nans) - X = Xopt(vocs=test_vocs, evaluator=evaluator, generator=generator) + X = Xopt(evaluator=evaluator, generator=generator) X.evaluate_data({"x0": 1.0, "x1": 1.0}) for i in range(50): diff --git a/xopt/tests/generators/sequential/test_serialization.py b/xopt/tests/generators/sequential/test_serialization.py index f1b38292d..d33bcb288 100644 --- a/xopt/tests/generators/sequential/test_serialization.py +++ b/xopt/tests/generators/sequential/test_serialization.py @@ -31,7 +31,7 @@ def 
test_serialization_and_restart(self, generator): evaluator = Evaluator(function=sin_function) gen = generator(vocs=test_vocs) - X = Xopt(vocs=test_vocs, evaluator=evaluator, generator=gen) + X = Xopt(evaluator=evaluator, generator=gen) X.random_evaluate(1) for i in range(10): diff --git a/xopt/tests/test_evaluator.py b/xopt/tests/test_evaluator.py index 575b5ee76..8029c30f4 100644 --- a/xopt/tests/test_evaluator.py +++ b/xopt/tests/test_evaluator.py @@ -162,7 +162,7 @@ def e(input): evaluator = Evaluator(function=func) generator = RandomGenerator(vocs=vocs) - X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs) + X = Xopt(evaluator=evaluator, generator=generator) X.random_evaluate(5) diff --git a/xopt/tests/test_io.py b/xopt/tests/test_io.py index 94660ed75..c2d4df52d 100644 --- a/xopt/tests/test_io.py +++ b/xopt/tests/test_io.py @@ -14,7 +14,7 @@ class Test_IO: def test_options_to_dict(self): evaluator = Evaluator(function=dummy) generator = RandomGenerator(vocs=TEST_VOCS_BASE) - X = Xopt(generator=generator, evaluator=evaluator, vocs=TEST_VOCS_BASE) + X = Xopt(generator=generator, evaluator=evaluator) print(X.model_dump_json()) print(X.to_json(base_key="bk")) @@ -22,7 +22,7 @@ def test_state_to_dict(self): evaluator = Evaluator(function=dummy) generator = RandomGenerator(vocs=TEST_VOCS_BASE) - X = Xopt(generator=generator, evaluator=evaluator, vocs=TEST_VOCS_BASE) + X = Xopt(generator=generator, evaluator=evaluator) state_dict = X.dict() assert state_dict["generator"]["name"] == generator.name print(state_dict) diff --git a/xopt/tests/test_numerical_optimizer.py b/xopt/tests/test_numerical_optimizer.py index 938ea64ae..1f92fa524 100644 --- a/xopt/tests/test_numerical_optimizer.py +++ b/xopt/tests/test_numerical_optimizer.py @@ -92,7 +92,7 @@ def test_in_xopt(self): evaluator = Evaluator(function=evaluate_TNK) - X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs) + X = Xopt(generator=generator, evaluator=evaluator) X.evaluate_data( 
pd.DataFrame({"x1": [1.0, 0.75, 3.14, 0], "x2": [0.7, 0.95, 0, 3.14]}) diff --git a/xopt/tests/test_perf.py b/xopt/tests/test_perf.py index cfaf614be..bcf7ec858 100644 --- a/xopt/tests/test_perf.py +++ b/xopt/tests/test_perf.py @@ -24,9 +24,9 @@ def test_xopt_overhead(self): config = { "generator": { "name": "random", + "vocs": VOCS_15D, }, "evaluator": {"function": "xopt.resources.testing.xtest_callable"}, - "vocs": VOCS_15D, } X = Xopt.from_dict(config) diff --git a/xopt/tests/test_stopping_condition.py b/xopt/tests/test_stopping_condition.py index e58667d0c..763209817 100644 --- a/xopt/tests/test_stopping_condition.py +++ b/xopt/tests/test_stopping_condition.py @@ -270,7 +270,6 @@ def test_conditions_in_xopt(self, simple_vocs, evaluator, condition): generator = NelderMeadGenerator(vocs=simple_vocs) X = Xopt( - vocs=simple_vocs, evaluator=evaluator, generator=generator, stopping_condition=condition, @@ -292,7 +291,6 @@ def test_no_stopping_condition_raises_error(self, simple_vocs, evaluator): generator = NelderMeadGenerator(vocs=simple_vocs) X = Xopt( - vocs=simple_vocs, evaluator=evaluator, generator=generator, # No stopping_condition diff --git a/xopt/tests/test_xopt.py b/xopt/tests/test_xopt.py index fde48eb1d..6902af5ac 100644 --- a/xopt/tests/test_xopt.py +++ b/xopt/tests/test_xopt.py @@ -48,7 +48,7 @@ def dummy(x): evaluator = Evaluator(function=dummy) gen = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) - Xopt(generator=gen, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE)) + Xopt(generator=gen, evaluator=evaluator) # init with yaml YAML = """ @@ -61,16 +61,15 @@ def dummy(x): generator: name: random - - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - constants: {a: 0} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + 
constants: {a: 0} """ X = Xopt.from_yaml(YAML) @@ -124,14 +123,12 @@ def check_all(X, length): X1 = Xopt( generator=RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)), evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), data=pd.DataFrame(test_data, index=["foo", 0.25, 1]), ) X1 = Xopt( generator=RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)), evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), data=pd.DataFrame(test_data, index=[1, 2, 3]), ) check_all(X1, 3) @@ -139,7 +136,6 @@ def check_all(X, length): X1 = Xopt( generator=RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)), evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) check_all(X1, 0) @@ -185,17 +181,16 @@ def test_bad_vocs(self): generator: name: random - - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - constants: {a: dummy_constant} - bad_val: 5 + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + constants: {a: dummy_constant} + bad_val: 5 """ with pytest.raises(ValidationError): @@ -206,7 +201,7 @@ def test_evaluate(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) xopt = Xopt( - generator=generator, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + generator=generator, evaluator=evaluator, ) out = xopt.evaluate({"x1": 0.4, "x2": 0.3}) @@ -220,7 +215,7 @@ def test_evaluate(self): evaluator = Evaluator(function=xtest_callable) generator = RandomGenerator(vocs=test_vocs) - xopt = Xopt(generator=generator, evaluator=evaluator, vocs=test_vocs) + xopt = Xopt(generator=generator, evaluator=evaluator) out = xopt.evaluate({"x2": 0.2}) assert isinstance(out, dict) @@ -234,7 +229,7 @@ def test_evaluate_data(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) xopt = Xopt( - generator=generator, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + 
generator=generator, evaluator=evaluator, ) # test evaluating data w/o constants specified @@ -262,7 +257,7 @@ def test_str_method(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) xopt = Xopt( - generator=generator, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + generator=generator, evaluator=evaluator, ) # fixed seed for deterministic results @@ -292,7 +287,6 @@ def g(x, a=True): X = Xopt( generator=generator, evaluator=evaluator, - vocs=vocs, strict=True, ) with pytest.raises(XoptError): @@ -304,7 +298,6 @@ def g(x, a=True): X2 = Xopt( generator=generator, evaluator=evaluator, - vocs=vocs, strict=True, ) with pytest.raises(XoptError): @@ -316,7 +309,6 @@ def test_submit_bad_data(self): X = Xopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) with pytest.raises(ValueError): X.evaluate_data(pd.DataFrame({"x1": [0.0, 5.0], "x2": [-3.0, 1.0]})) @@ -327,7 +319,6 @@ def test_add_data(self): X = Xopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) assert X.generator.data is None X.add_data(pd.DataFrame({"x1": [0.0, 1.0], "x2": [0.0, 1.0]})) @@ -342,7 +333,6 @@ def test_remove_data(self): X = Xopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) X.add_data(pd.DataFrame({"x1": [0.0, 1.0], "x2": [0.0, 1.0]})) with pytest.raises(KeyError): @@ -359,7 +349,6 @@ def test_asynch(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) n_steps = 5 for i in range(n_steps): @@ -374,7 +363,6 @@ def test_asynch(self): X2 = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) n_steps = 5 @@ -390,13 +378,13 @@ def bad_function(inval): evaluator = Evaluator(function=bad_function) gen = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) - X = Xopt(generator=gen, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE)) + X = Xopt(generator=gen, evaluator=evaluator) # should raise an error 
(default) with pytest.raises(XoptError): X.step() - X2 = Xopt(generator=gen, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE)) + X2 = Xopt(generator=gen, evaluator=evaluator) X2.strict = False X2.random_evaluate(10) @@ -416,7 +404,7 @@ def bad_function_sometimes(inval): evaluator = Evaluator(function=bad_function_sometimes) gen = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) X = AsynchronousXopt( - generator=gen, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + generator=gen, evaluator=evaluator, ) X.strict = False @@ -433,7 +421,7 @@ def test_dump_w_exploded_cols(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) X = Xopt( - generator=generator, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + generator=generator, evaluator=evaluator, ) X.dump_file = "test_checkpointing.yaml" @@ -465,7 +453,7 @@ def test_checkpointing(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) X = Xopt( - generator=generator, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + generator=generator, evaluator=evaluator, ) X.dump_file = "test_checkpointing.yaml" @@ -487,7 +475,7 @@ def test_random_evaluate(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) xopt = Xopt( - generator=generator, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + generator=generator, evaluator=evaluator, ) # fixed seed for deterministic results @@ -501,10 +489,10 @@ def test_copying_generators(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) X = Xopt( - generator=generator, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + generator=generator, evaluator=evaluator, ) X2 = Xopt( - generator=generator, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) + generator=generator, evaluator=evaluator, ) assert X.generator is not X2.generator From 4e7307ffb7c873338c28e0c813841a0ee14b6650 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 11:36:29 -0600 Subject: [PATCH 03/16] fix issues --- 
xopt/tests/generators/bayesian/test_mggpo.py | 2 +- xopt/tests/generators/ga/test_nsga2.py | 2 +- xopt/tests/test_xopt.py | 27 +++++++++++++------- 3 files changed, 20 insertions(+), 11 deletions(-) diff --git a/xopt/tests/generators/bayesian/test_mggpo.py b/xopt/tests/generators/bayesian/test_mggpo.py index 648179694..10758c60a 100644 --- a/xopt/tests/generators/bayesian/test_mggpo.py +++ b/xopt/tests/generators/bayesian/test_mggpo.py @@ -84,7 +84,7 @@ def test_bactched(self): gen = MGGPOGenerator(vocs=vocs, reference_point=reference_point) X = Xopt(evaluator=evaluator, generator=gen) - X.evaluate_data(pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]}))) + X.evaluate_data(pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]})) for _ in [0, 1]: X.step() diff --git a/xopt/tests/generators/ga/test_nsga2.py b/xopt/tests/generators/ga/test_nsga2.py index 4ab02b823..aacd58e30 100644 --- a/xopt/tests/generators/ga/test_nsga2.py +++ b/xopt/tests/generators/ga/test_nsga2.py @@ -327,7 +327,7 @@ def test_nsga2_checkpoint_reload_yaml(nsga2_optimization_with_checkpoint): constants: a: dummy_constant - + evaluator: function: xopt.resources.test_functions.tnk.evaluate_TNK diff --git a/xopt/tests/test_xopt.py b/xopt/tests/test_xopt.py index 6902af5ac..49b185acf 100644 --- a/xopt/tests/test_xopt.py +++ b/xopt/tests/test_xopt.py @@ -201,7 +201,8 @@ def test_evaluate(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) xopt = Xopt( - generator=generator, evaluator=evaluator, + generator=generator, + evaluator=evaluator, ) out = xopt.evaluate({"x1": 0.4, "x2": 0.3}) @@ -229,7 +230,8 @@ def test_evaluate_data(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) xopt = Xopt( - generator=generator, evaluator=evaluator, + generator=generator, + evaluator=evaluator, ) # test evaluating data w/o constants specified @@ -257,7 +259,8 @@ def test_str_method(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) xopt = Xopt( - generator=generator, 
evaluator=evaluator, + generator=generator, + evaluator=evaluator, ) # fixed seed for deterministic results @@ -404,7 +407,8 @@ def bad_function_sometimes(inval): evaluator = Evaluator(function=bad_function_sometimes) gen = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) X = AsynchronousXopt( - generator=gen, evaluator=evaluator, + generator=gen, + evaluator=evaluator, ) X.strict = False @@ -421,7 +425,8 @@ def test_dump_w_exploded_cols(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) X = Xopt( - generator=generator, evaluator=evaluator, + generator=generator, + evaluator=evaluator, ) X.dump_file = "test_checkpointing.yaml" @@ -453,7 +458,8 @@ def test_checkpointing(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) X = Xopt( - generator=generator, evaluator=evaluator, + generator=generator, + evaluator=evaluator, ) X.dump_file = "test_checkpointing.yaml" @@ -475,7 +481,8 @@ def test_random_evaluate(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) xopt = Xopt( - generator=generator, evaluator=evaluator, + generator=generator, + evaluator=evaluator, ) # fixed seed for deterministic results @@ -489,10 +496,12 @@ def test_copying_generators(self): generator = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) X = Xopt( - generator=generator, evaluator=evaluator, + generator=generator, + evaluator=evaluator, ) X2 = Xopt( - generator=generator, evaluator=evaluator, + generator=generator, + evaluator=evaluator, ) assert X.generator is not X2.generator From 10ef5cabb8ccdb97b5bc4ed50dbecd864231909b Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 14:02:50 -0600 Subject: [PATCH 04/16] fix tests --- xopt/resources/testing.py | 4 ++-- xopt/tests/generators/bayesian/test_bayesian_generator.py | 2 +- xopt/tests/generators/bayesian/test_mobo.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/xopt/resources/testing.py b/xopt/resources/testing.py index cc4133429..77cfdf7e5 100644 --- 
a/xopt/resources/testing.py +++ b/xopt/resources/testing.py @@ -223,7 +223,7 @@ def check_dict_allclose(dict1, dict2, excluded_keys=None, rtol=1e-5, atol=1e-8): def reload_gen_from_json(gen): assert isinstance(gen, Generator) gen_class = gen.__class__ - gen_new = gen_class(vocs=gen.vocs, **json.loads(gen.json())) + gen_new = gen_class(**json.loads(gen.json())) gen_new.add_data(gen.data.copy()) return gen_new @@ -231,7 +231,7 @@ def reload_gen_from_json(gen): def reload_gen_from_yaml(gen): assert isinstance(gen, Generator) gen_class = gen.__class__ - gen_new = gen_class(vocs=gen.vocs, **remove_none_values(yaml.safe_load(gen.yaml()))) + gen_new = gen_class(**remove_none_values(yaml.safe_load(gen.yaml()))) gen_new.add_data(gen.data.copy()) return gen_new diff --git a/xopt/tests/generators/bayesian/test_bayesian_generator.py b/xopt/tests/generators/bayesian/test_bayesian_generator.py index 0d1e61fbf..db1cc819c 100644 --- a/xopt/tests/generators/bayesian/test_bayesian_generator.py +++ b/xopt/tests/generators/bayesian/test_bayesian_generator.py @@ -141,7 +141,7 @@ def test_get_model(self): gen = deepcopy(gen) gen.gp_constructor.covar_modules = {"y1": PeriodicKernel()} - gen = PatchBayesianGenerator(vocs=TEST_VOCS_BASE, **gen.model_dump()) + gen = PatchBayesianGenerator(**gen.model_dump()) model = gen.train_model(test_data) assert isinstance(model.models[0].covar_module, PeriodicKernel) diff --git a/xopt/tests/generators/bayesian/test_mobo.py b/xopt/tests/generators/bayesian/test_mobo.py index 67ee5cf26..6ccd1c157 100644 --- a/xopt/tests/generators/bayesian/test_mobo.py +++ b/xopt/tests/generators/bayesian/test_mobo.py @@ -122,7 +122,7 @@ def test_script(self): for ele in [gen]: dump = ele.model_dump() - generator = MOBOGenerator(vocs=tnk_vocs, **dump) + generator = MOBOGenerator(**dump) X = Xopt(generator=generator, evaluator=evaluator) X.random_evaluate(3) X.step() @@ -324,7 +324,7 @@ def test_log_mobo(self, use_cuda): for ele in [gen]: dump = ele.model_dump() - 
generator = MOBOGenerator(vocs=tnk_vocs, **dump) + generator = MOBOGenerator(**dump) X = Xopt(generator=generator, evaluator=evaluator) X.generator.numerical_optimizer.max_iter = 1 X.random_evaluate(3) From 2037f1ee3a1b564e35f522bdd7a86e9b8adea5f7 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 14:18:45 -0600 Subject: [PATCH 05/16] update examples --- docs/examples/basic/xopt_evaluator.ipynb | 4 +-- docs/examples/basic/xopt_generator.ipynb | 4 +-- docs/examples/basic/xopt_parallel.ipynb | 23 +++++++------ .../basic/xopt_stopping_condition.ipynb | 10 +----- .../bayes_exp/bayesian_exploration.ipynb | 4 +-- .../bayesian_exploration_from_yaml.ipynb | 24 ++++++------- ...bayesian_exploration_w_interpolation.ipynb | 4 +-- .../bayesian_exploration_with_nans.ipynb | 4 +-- docs/examples/ga/cnsga_tnk.ipynb | 3 +- docs/examples/ga/nsga2/nsga2_python.ipynb | 4 +-- docs/examples/ga/nsga2/nsga2_to_cnsga.ipynb | 2 +- .../gp_model_creation/model_creation.ipynb | 2 +- .../multi_objective_bayes_opt/mggpo.ipynb | 2 +- .../multi_objective_bayes_opt/mobo.ipynb | 6 ++-- .../multi_fidelity_mobo.ipynb | 4 +-- docs/examples/scipy/latin_hypercube.ipynb | 4 +-- .../sequential/extremum_seeking.ipynb | 32 ++++++++--------- docs/examples/sequential/neldermead.ipynb | 34 +++++++++---------- docs/examples/sequential/rcds.ipynb | 13 +++---- .../bax_tutorial.ipynb | 2 +- .../benchmarking.ipynb | 2 +- .../bo_tutorial.ipynb | 6 ++-- .../constrained_bo_tutorial.ipynb | 2 +- .../custom_objective.ipynb | 6 ++-- .../fast_model_eval.ipynb | 2 +- .../fixed_features.ipynb | 6 ++-- .../hessian_kernel.ipynb | 2 +- .../heteroskedastic_noise_tutorial.ipynb | 2 +- .../interpolate_tutorial.ipynb | 6 ++-- .../multi_fidelity_simple.ipynb | 6 ++-- .../noisy_bo_tutorial.ipynb | 2 +- .../time_dependent_bo.ipynb | 2 +- .../upper_confidence_bound.ipynb | 12 +++---- .../trust_region_bo/turbo_basics.ipynb | 2 +- .../trust_region_bo/turbo_optimize.ipynb | 4 +-- .../trust_region_bo/turbo_safety.ipynb | 2 
+- 36 files changed, 120 insertions(+), 129 deletions(-) diff --git a/docs/examples/basic/xopt_evaluator.ipynb b/docs/examples/basic/xopt_evaluator.ipynb index 5a5d7fbee..eb132ceb0 100644 --- a/docs/examples/basic/xopt_evaluator.ipynb +++ b/docs/examples/basic/xopt_evaluator.ipynb @@ -318,7 +318,7 @@ "outputs": [], "source": [ "X = Xopt(\n", - " generator=RandomGenerator(vocs=vocs), evaluator=Evaluator(function=f), vocs=vocs\n", + " generator=RandomGenerator(vocs=vocs), evaluator=Evaluator(function=f),\n", ")\n", "X.strict = False\n", "\n", @@ -355,7 +355,6 @@ "X2 = AsynchronousXopt(\n", " generator=RandomGenerator(vocs=vocs),\n", " evaluator=Evaluator(function=f, executor=executor, max_workers=MAX_WORKERS),\n", - " vocs=vocs,\n", ")\n", "X2.strict = False" ] @@ -407,7 +406,6 @@ "X2 = AsynchronousXopt(\n", " generator=RandomGenerator(vocs=vocs),\n", " evaluator=Evaluator(function=f, executor=executor, max_workers=MAX_WORKERS),\n", - " vocs=vocs,\n", ")\n", "X2.evaluator.vectorized = True\n", "X2.strict = False\n", diff --git a/docs/examples/basic/xopt_generator.ipynb b/docs/examples/basic/xopt_generator.ipynb index a3c98123e..485b32e31 100644 --- a/docs/examples/basic/xopt_generator.ipynb +++ b/docs/examples/basic/xopt_generator.ipynb @@ -108,7 +108,7 @@ "source": [ "evaluator = Evaluator(function=test_function)\n", "generator = generator_type(vocs=vocs)\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X" ] }, @@ -177,7 +177,7 @@ "\n", "\n", "my_generator = MyGenerator(vocs=vocs)\n", - "X2 = Xopt(evaluator=evaluator, vocs=vocs, generator=my_generator)\n", + "X2 = Xopt(evaluator=evaluator, generator=my_generator)\n", "\n", "for i in range(4):\n", " X2.step()" diff --git a/docs/examples/basic/xopt_parallel.ipynb b/docs/examples/basic/xopt_parallel.ipynb index b61b56df3..a487d4491 100644 --- a/docs/examples/basic/xopt_parallel.ipynb +++ b/docs/examples/basic/xopt_parallel.ipynb @@ 
-98,22 +98,23 @@ " name: cnsga\n", " output_path: temp\n", " population_size: 64\n", - " \n", + " vocs:\n", + " variables:\n", + " x1: [0, 3.14159]\n", + " x2: [0, 3.14159]\n", + " objectives: {y1: MINIMIZE, y2: MINIMIZE}\n", + " constraints:\n", + " c1: [GREATER_THAN, 0]\n", + " c2: [LESS_THAN, 0.5]\n", + " constants: {a: dummy_constant}\n", + " \n", "evaluator:\n", " function: xopt.resources.test_functions.tnk.evaluate_TNK\n", " function_kwargs:\n", " sleep: 0\n", " random_sleep: 0.1\n", " \n", - "vocs:\n", - " variables:\n", - " x1: [0, 3.14159]\n", - " x2: [0, 3.14159]\n", - " objectives: {y1: MINIMIZE, y2: MINIMIZE}\n", - " constraints:\n", - " c1: [GREATER_THAN, 0]\n", - " c2: [LESS_THAN, 0.5]\n", - " constants: {a: dummy_constant}\n", + "\n", "\n", "\"\"\"\n", "X = Xopt(YAML)\n", @@ -575,7 +576,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/basic/xopt_stopping_condition.ipynb b/docs/examples/basic/xopt_stopping_condition.ipynb index ae17ef6e3..9a9e77e9d 100644 --- a/docs/examples/basic/xopt_stopping_condition.ipynb +++ b/docs/examples/basic/xopt_stopping_condition.ipynb @@ -153,7 +153,6 @@ "evaluator = Evaluator(function=sphere_function)\n", "\n", "X = Xopt(\n", - " vocs=sphere_vocs,\n", " generator=generator,\n", " evaluator=evaluator,\n", " stopping_condition=max_evals_condition,\n", @@ -212,7 +211,6 @@ "generator = UpperConfidenceBoundGenerator(vocs=sphere_vocs)\n", "\n", "X = Xopt(\n", - " vocs=sphere_vocs,\n", " generator=generator,\n", " evaluator=evaluator,\n", " stopping_condition=target_condition,\n", @@ -278,7 +276,6 @@ "generator = UpperConfidenceBoundGenerator(vocs=noisy_vocs)\n", "\n", "X = Xopt(\n", - " vocs=noisy_vocs,\n", " generator=generator,\n", " evaluator=noisy_evaluator,\n", " stopping_condition=convergence_condition,\n", @@ -352,7 +349,6 @@ "generator = UpperConfidenceBoundGenerator(vocs=sphere_vocs)\n", 
"\n", "X = Xopt(\n", - " vocs=sphere_vocs,\n", " generator=generator,\n", " evaluator=evaluator,\n", " stopping_condition=stagnation_condition,\n", @@ -453,7 +449,6 @@ "generator = RandomGenerator(vocs=constrained_vocs)\n", "\n", "X = Xopt(\n", - " vocs=constrained_vocs,\n", " generator=generator,\n", " evaluator=constrained_evaluator,\n", " stopping_condition=feasibility_condition,\n", @@ -551,7 +546,6 @@ "generator = UpperConfidenceBoundGenerator(vocs=sphere_vocs)\n", "\n", "X = Xopt(\n", - " vocs=sphere_vocs,\n", " generator=generator,\n", " evaluator=evaluator,\n", " stopping_condition=composite_or_condition,\n", @@ -603,7 +597,6 @@ "generator = UpperConfidenceBoundGenerator(vocs=sphere_vocs)\n", "\n", "X = Xopt(\n", - " vocs=sphere_vocs,\n", " generator=generator,\n", " evaluator=evaluator,\n", " stopping_condition=composite_and_condition,\n", @@ -728,7 +721,6 @@ "generator = UpperConfidenceBoundGenerator(vocs=sphere_vocs)\n", "\n", "X = Xopt(\n", - " vocs=sphere_vocs,\n", " generator=generator,\n", " evaluator=evaluator,\n", " stopping_condition=configured_condition,\n", @@ -795,7 +787,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/bayes_exp/bayesian_exploration.ipynb b/docs/examples/bayes_exp/bayesian_exploration.ipynb index efd3d68aa..e5f8836f0 100644 --- a/docs/examples/bayes_exp/bayesian_exploration.ipynb +++ b/docs/examples/bayes_exp/bayesian_exploration.ipynb @@ -68,7 +68,7 @@ "\n", "evaluator = Evaluator(function=evaluate_TNK)\n", "\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X" ] }, @@ -253,7 +253,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/bayes_exp/bayesian_exploration_from_yaml.ipynb 
b/docs/examples/bayes_exp/bayesian_exploration_from_yaml.ipynb index 1c02b7a75..d34b63bab 100644 --- a/docs/examples/bayes_exp/bayesian_exploration_from_yaml.ipynb +++ b/docs/examples/bayes_exp/bayesian_exploration_from_yaml.ipynb @@ -36,20 +36,20 @@ "YAML = \"\"\"\n", "generator:\n", " name: bayesian_exploration\n", - "\n", + " vocs:\n", + " variables:\n", + " x1: [0, 3.14159]\n", + " x2: [0, 3.14159]\n", + " objectives:\n", + " y1: EXPLORE\n", + " constraints:\n", + " c1: [GREATER_THAN, 0]\n", + " c2: [LESS_THAN, 0.5]\n", + " constants: {a: dummy_constant}\n", "evaluator:\n", " function: xopt.resources.test_functions.tnk.evaluate_TNK\n", "\n", - "vocs:\n", - " variables:\n", - " x1: [0, 3.14159]\n", - " x2: [0, 3.14159]\n", - " objectives:\n", - " y1: EXPLORE\n", - " constraints:\n", - " c1: [GREATER_THAN, 0]\n", - " c2: [LESS_THAN, 0.5]\n", - " constants: {a: dummy_constant}\n", + "\n", "\n", "\"\"\"" ] @@ -161,7 +161,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/bayes_exp/bayesian_exploration_w_interpolation.ipynb b/docs/examples/bayes_exp/bayesian_exploration_w_interpolation.ipynb index 17f9cd427..cedfdd7ed 100644 --- a/docs/examples/bayes_exp/bayesian_exploration_w_interpolation.ipynb +++ b/docs/examples/bayes_exp/bayesian_exploration_w_interpolation.ipynb @@ -73,7 +73,7 @@ "\n", "evaluator = Evaluator(function=evaluate_TNK)\n", "\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X" ] }, @@ -291,7 +291,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/bayes_exp/bayesian_exploration_with_nans.ipynb b/docs/examples/bayes_exp/bayesian_exploration_with_nans.ipynb index baeb00753..af7c2d722 100644 --- 
a/docs/examples/bayes_exp/bayesian_exploration_with_nans.ipynb +++ b/docs/examples/bayes_exp/bayesian_exploration_with_nans.ipynb @@ -91,7 +91,7 @@ "generator.numerical_optimizer.n_restarts = NUM_RESTARTS\n", "\n", "evaluator = Evaluator(function=evaluate)\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X" ] }, @@ -183,7 +183,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/ga/cnsga_tnk.ipynb b/docs/examples/ga/cnsga_tnk.ipynb index 66757d631..c4454f258 100644 --- a/docs/examples/ga/cnsga_tnk.ipynb +++ b/docs/examples/ga/cnsga_tnk.ipynb @@ -78,7 +78,6 @@ "X = Xopt(\n", " generator=CNSGAGenerator(vocs=tnk_vocs),\n", " evaluator=ev,\n", - " vocs=tnk_vocs,\n", ")\n", "X.strict = False" ] @@ -530,7 +529,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/ga/nsga2/nsga2_python.ipynb b/docs/examples/ga/nsga2/nsga2_python.ipynb index 1859555d4..7c9d6f97e 100644 --- a/docs/examples/ga/nsga2/nsga2_python.ipynb +++ b/docs/examples/ga/nsga2/nsga2_python.ipynb @@ -98,7 +98,7 @@ "source": [ "# Run the optimizer for a few generations. 
Notice log output printed below this cell\n", "ev.max_workers = generator.population_size\n", - "my_xopt = Xopt(generator=generator, evaluator=ev, vocs=prob_vocs)\n", + "my_xopt = Xopt(generator=generator, evaluator=ev)\n", "\n", "for _ in range(3):\n", " my_xopt.step()" @@ -237,7 +237,7 @@ "\n", "# Run it for a couple of generations\n", "ev.max_workers = generator.population_size\n", - "my_xopt = Xopt(generator=generator, evaluator=ev, vocs=prob_vocs)\n", + "my_xopt = Xopt(generator=generator, evaluator=ev)\n", "for _ in range(32):\n", " my_xopt.step()" ] diff --git a/docs/examples/ga/nsga2/nsga2_to_cnsga.ipynb b/docs/examples/ga/nsga2/nsga2_to_cnsga.ipynb index c67331c0a..6ced86eb6 100644 --- a/docs/examples/ga/nsga2/nsga2_to_cnsga.ipynb +++ b/docs/examples/ga/nsga2/nsga2_to_cnsga.ipynb @@ -79,7 +79,7 @@ "\n", "# Run it for a couple of generations\n", "ev.max_workers = generator.population_size\n", - "X = Xopt(generator=generator, evaluator=ev, vocs=prob_vocs)\n", + "X = Xopt(generator=generator, evaluator=ev)\n", "for _ in range(64):\n", " X.step()" ] diff --git a/docs/examples/gp_model_creation/model_creation.ipynb b/docs/examples/gp_model_creation/model_creation.ipynb index 890c7901c..9ab3ee3e4 100644 --- a/docs/examples/gp_model_creation/model_creation.ipynb +++ b/docs/examples/gp_model_creation/model_creation.ipynb @@ -52,7 +52,7 @@ "# collect some data using random sampling\n", "evaluator = Evaluator(function=evaluate_rosenbrock)\n", "generator = RandomGenerator(vocs=vocs)\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X.random_evaluate(15)" ] }, diff --git a/docs/examples/multi_objective_bayes_opt/mggpo.ipynb b/docs/examples/multi_objective_bayes_opt/mggpo.ipynb index 915295378..2de0a8883 100644 --- a/docs/examples/multi_objective_bayes_opt/mggpo.ipynb +++ b/docs/examples/multi_objective_bayes_opt/mggpo.ipynb @@ -64,7 +64,7 @@ "gen.n_monte_carlo_samples = N_MC_SAMPLES\n", 
"gen.numerical_optimizer.n_restarts = NUM_RESTARTS\n", "gen.gp_constructor.use_low_noise_prior = True\n", - "X = Xopt(evaluator=evaluator, generator=gen, vocs=vocs)\n", + "X = Xopt(evaluator=evaluator, generator=gen)\n", "X.evaluate_data(pd.DataFrame({\"x1\": [1.0, 0.75], \"x2\": [0.75, 1.0]}))\n", "\n", "X" diff --git a/docs/examples/multi_objective_bayes_opt/mobo.ipynb b/docs/examples/multi_objective_bayes_opt/mobo.ipynb index 71b0a2ecf..459e032e9 100644 --- a/docs/examples/multi_objective_bayes_opt/mobo.ipynb +++ b/docs/examples/multi_objective_bayes_opt/mobo.ipynb @@ -67,7 +67,7 @@ "generator.gp_constructor.use_low_noise_prior = True\n", "\n", "\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=tnk_vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X.evaluate_data(pd.DataFrame({\"x1\": [1.0, 0.75], \"x2\": [0.75, 1.0]}))\n", "\n", "for i in range(N_STEPS):\n", @@ -198,7 +198,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "xopt-dev", "language": "python", "name": "python3" }, @@ -212,7 +212,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/multi_objective_bayes_opt/multi_fidelity_mobo.ipynb b/docs/examples/multi_objective_bayes_opt/multi_fidelity_mobo.ipynb index 46f4a2cca..b9b5f2151 100644 --- a/docs/examples/multi_objective_bayes_opt/multi_fidelity_mobo.ipynb +++ b/docs/examples/multi_objective_bayes_opt/multi_fidelity_mobo.ipynb @@ -103,7 +103,7 @@ "generator.n_monte_carlo_samples = N_MC_SAMPLES\n", "generator.gp_constructor.use_low_noise_prior = True\n", "\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=my_vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "\n", "# evaluate at some explicit initial points\n", "X.evaluate_data(pd.DataFrame({\"x1\": [1.0, 0.75], \"x2\": [0.75, 1.0], \"s\": [0.0, 0.1]}))\n", @@ -327,7 +327,7 
@@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/scipy/latin_hypercube.ipynb b/docs/examples/scipy/latin_hypercube.ipynb index cdf546c1d..ed79e0ef3 100644 --- a/docs/examples/scipy/latin_hypercube.ipynb +++ b/docs/examples/scipy/latin_hypercube.ipynb @@ -51,7 +51,7 @@ "# batches and the batch size determines the arrangement of points to cover\n", "# the bounded region of the variables.\n", "generator = LatinHypercubeGenerator(vocs=vocs, batch_size=1024)\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X" ] }, @@ -182,7 +182,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/sequential/extremum_seeking.ipynb b/docs/examples/sequential/extremum_seeking.ipynb index 3d7956ff4..1a7d83edd 100644 --- a/docs/examples/sequential/extremum_seeking.ipynb +++ b/docs/examples/sequential/extremum_seeking.ipynb @@ -168,22 +168,23 @@ " k: 2.0\n", " oscillation_size: 0.1\n", " decay_rate: 1.0\n", + " vocs:\n", + " variables:\n", + " p0: [-2, 2]\n", + " p1: [-2, 2]\n", + " p2: [-2, 2]\n", + " p3: [-2, 2]\n", + " p4: [-2, 2]\n", + " p5: [-2, 2]\n", + " p6: [-2, 2]\n", + " p7: [-2, 2]\n", + " p8: [-2, 2]\n", + " p9: [-2, 2]\n", + " objectives:\n", + " f: MINIMIZE\n", "evaluator:\n", " function: __main__.f_ES_minimize\n", - "vocs:\n", - " variables:\n", - " p0: [-2, 2]\n", - " p1: [-2, 2]\n", - " p2: [-2, 2]\n", - " p3: [-2, 2]\n", - " p4: [-2, 2]\n", - " p5: [-2, 2]\n", - " p6: [-2, 2]\n", - " p7: [-2, 2]\n", - " p8: [-2, 2]\n", - " p9: [-2, 2]\n", - " objectives:\n", - " f: MINIMIZE\n", + "\n", "\"\"\"\n", "\n", "X = Xopt.from_yaml(YAML)\n", @@ -413,7 +414,6 @@ "from xopt.stopping_conditions import MaxEvaluationsCondition\n", "\n", "X = Xopt(\n", - " 
vocs=vocs,\n", " evaluator=evaluator,\n", " generator=generator,\n", " stopping_condition=MaxEvaluationsCondition(max_evaluations=20),\n", @@ -586,7 +586,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" }, "widgets": { "application/vnd.jupyter.widget-state+json": { diff --git a/docs/examples/sequential/neldermead.ipynb b/docs/examples/sequential/neldermead.ipynb index 5181e10a5..a6523cfaf 100644 --- a/docs/examples/sequential/neldermead.ipynb +++ b/docs/examples/sequential/neldermead.ipynb @@ -63,13 +63,14 @@ "generator:\n", " name: neldermead\n", " adaptive: true\n", + " vocs:\n", + " variables:\n", + " x0: [-5, 5]\n", + " x1: [-5, 5]\n", + " objectives: {y: MINIMIZE}\n", "evaluator:\n", " function: xopt.resources.test_functions.rosenbrock.evaluate_rosenbrock\n", - "vocs:\n", - " variables:\n", - " x0: [-5, 5]\n", - " x1: [-5, 5]\n", - " objectives: {y: MINIMIZE}\n", + "\n", "\"\"\"\n", "X = Xopt.from_yaml(YAML)" ] @@ -450,7 +451,6 @@ "\n", "X = Xopt(\n", " evaluator=ev,\n", - " vocs=vocs,\n", " generator=NelderMeadGenerator(vocs=vocs),\n", " stopping_condition=MaxEvaluationsCondition(max_evaluations=50),\n", ")\n", @@ -561,17 +561,17 @@ " max_evaluations: 500\n", "generator:\n", " name: neldermead\n", + " vocs:\n", + " variables:\n", + " x1: [-5, 5]\n", + " x2: [-5, 5]\n", + " x3: [-5, 5]\n", + " x4: [-5, 5]\n", + " x5: [-5, 5]\n", + " objectives:\n", + " y: MINIMIZE\n", "evaluator:\n", " function: xopt.resources.test_functions.rosenbrock.evaluate_rosenbrock\n", - "vocs:\n", - " variables:\n", - " x1: [-5, 5]\n", - " x2: [-5, 5]\n", - " x3: [-5, 5]\n", - " x4: [-5, 5]\n", - " x5: [-5, 5]\n", - " objectives:\n", - " y: MINIMIZE\n", "\"\"\"\n", "X = Xopt.from_yaml(YAML)" ] @@ -631,7 +631,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "xopt-dev", "language": "python", "name": "python3" }, @@ -645,7 +645,7 @@ "name": "python", 
"nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/sequential/rcds.ipynb b/docs/examples/sequential/rcds.ipynb index 76ad4cd4e..38f573b65 100644 --- a/docs/examples/sequential/rcds.ipynb +++ b/docs/examples/sequential/rcds.ipynb @@ -133,14 +133,15 @@ " init_mat: null\n", " noise: 0.00001\n", " step: 0.01\n", + " vocs:\n", + " variables:\n", + " p0: [0, 1]\n", + " p1: [0, 1]\n", + " objectives:\n", + " f: MINIMIZE\n", "evaluator:\n", " function: __main__.f_test\n", - "vocs:\n", - " variables:\n", - " p0: [0, 1]\n", - " p1: [0, 1]\n", - " objectives:\n", - " f: MINIMIZE\n", + "\n", "\"\"\"\n", "\n", "X = Xopt.from_yaml(YAML)\n", diff --git a/docs/examples/single_objective_bayes_opt/bax_tutorial.ipynb b/docs/examples/single_objective_bayes_opt/bax_tutorial.ipynb index a9868bce2..a08398540 100644 --- a/docs/examples/single_objective_bayes_opt/bax_tutorial.ipynb +++ b/docs/examples/single_objective_bayes_opt/bax_tutorial.ipynb @@ -155,7 +155,7 @@ "evaluator = Evaluator(function=sin_function)\n", "\n", "# construct Xopt optimizer\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)" + "X = Xopt(evaluator=evaluator, generator=generator)" ] }, { diff --git a/docs/examples/single_objective_bayes_opt/benchmarking.ipynb b/docs/examples/single_objective_bayes_opt/benchmarking.ipynb index 53a01b7c0..e163c205e 100644 --- a/docs/examples/single_objective_bayes_opt/benchmarking.ipynb +++ b/docs/examples/single_objective_bayes_opt/benchmarking.ipynb @@ -272,7 +272,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/single_objective_bayes_opt/bo_tutorial.ipynb b/docs/examples/single_objective_bayes_opt/bo_tutorial.ipynb index c0388cdc6..19f5d4470 100644 --- a/docs/examples/single_objective_bayes_opt/bo_tutorial.ipynb +++ 
b/docs/examples/single_objective_bayes_opt/bo_tutorial.ipynb @@ -100,7 +100,7 @@ "evaluator = Evaluator(function=sin_function)\n", "generator = UpperConfidenceBoundGenerator(vocs=vocs)\n", "generator.gp_constructor.use_low_noise_prior = True\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)" + "X = Xopt(evaluator=evaluator, generator=generator)" ] }, { @@ -291,7 +291,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "xopt-dev", "language": "python", "name": "python3" }, @@ -305,7 +305,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb b/docs/examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb index b1619d390..a7c34b31e 100644 --- a/docs/examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb +++ b/docs/examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb @@ -122,7 +122,7 @@ "evaluator = Evaluator(function=test_function)\n", "generator_ei = ExpectedImprovementGenerator(vocs=vocs)\n", "generator_ei.gp_constructor.use_low_noise_prior = True\n", - "X_ei = Xopt(evaluator=evaluator, generator=generator_ei, vocs=vocs)" + "X_ei = Xopt(evaluator=evaluator, generator=generator_ei)" ] }, { diff --git a/docs/examples/single_objective_bayes_opt/custom_objective.ipynb b/docs/examples/single_objective_bayes_opt/custom_objective.ipynb index 2cd60e5b9..9e56205ad 100644 --- a/docs/examples/single_objective_bayes_opt/custom_objective.ipynb +++ b/docs/examples/single_objective_bayes_opt/custom_objective.ipynb @@ -116,7 +116,7 @@ " custom_objective=MyObjective(vocs),\n", ")\n", "generator.gp_constructor.use_low_noise_prior = True\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)\n", + "X = Xopt(evaluator=evaluator, generator=generator)\n", "print(X)\n", "\n", "X.random_evaluate(2)\n", 
@@ -209,7 +209,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "xopt-dev", "language": "python", "name": "python3" }, @@ -223,7 +223,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/single_objective_bayes_opt/fast_model_eval.ipynb b/docs/examples/single_objective_bayes_opt/fast_model_eval.ipynb index e5ec3e104..ef9a5a260 100644 --- a/docs/examples/single_objective_bayes_opt/fast_model_eval.ipynb +++ b/docs/examples/single_objective_bayes_opt/fast_model_eval.ipynb @@ -44,7 +44,7 @@ "\n", "evaluator = Evaluator(function=evaluate_TNK)\n", "\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "generator = X.generator" ] }, diff --git a/docs/examples/single_objective_bayes_opt/fixed_features.ipynb b/docs/examples/single_objective_bayes_opt/fixed_features.ipynb index 471a73494..906abf9ea 100644 --- a/docs/examples/single_objective_bayes_opt/fixed_features.ipynb +++ b/docs/examples/single_objective_bayes_opt/fixed_features.ipynb @@ -65,7 +65,7 @@ "\n", "evaluator = Evaluator(function=evaluate_rosenbrock)\n", "\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X" ] }, @@ -287,7 +287,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "xopt-dev", "language": "python", "name": "python3" }, @@ -301,7 +301,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/single_objective_bayes_opt/hessian_kernel.ipynb b/docs/examples/single_objective_bayes_opt/hessian_kernel.ipynb index f4f0dad36..18aa26330 100644 --- a/docs/examples/single_objective_bayes_opt/hessian_kernel.ipynb +++ 
b/docs/examples/single_objective_bayes_opt/hessian_kernel.ipynb @@ -78,7 +78,7 @@ "\n", "evaluator = Evaluator(function=evaluate_TNK)\n", "\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X" ] }, diff --git a/docs/examples/single_objective_bayes_opt/heteroskedastic_noise_tutorial.ipynb b/docs/examples/single_objective_bayes_opt/heteroskedastic_noise_tutorial.ipynb index 7674cc65a..c9057dc27 100644 --- a/docs/examples/single_objective_bayes_opt/heteroskedastic_noise_tutorial.ipynb +++ b/docs/examples/single_objective_bayes_opt/heteroskedastic_noise_tutorial.ipynb @@ -127,7 +127,7 @@ "source": [ "evaluator = Evaluator(function=sin_function)\n", "generator = UpperConfidenceBoundGenerator(vocs=vocs)\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)" + "X = Xopt(evaluator=evaluator, generator=generator)" ] }, { diff --git a/docs/examples/single_objective_bayes_opt/interpolate_tutorial.ipynb b/docs/examples/single_objective_bayes_opt/interpolate_tutorial.ipynb index 4d19454b1..8db2814b3 100644 --- a/docs/examples/single_objective_bayes_opt/interpolate_tutorial.ipynb +++ b/docs/examples/single_objective_bayes_opt/interpolate_tutorial.ipynb @@ -136,7 +136,7 @@ "\n", "\n", "evaluator = Evaluator(function=sphere_function)\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)" + "X = Xopt(evaluator=evaluator, generator=generator)" ] }, { @@ -276,7 +276,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "xopt-dev", "language": "python", "name": "python3" }, @@ -290,7 +290,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/single_objective_bayes_opt/multi_fidelity_simple.ipynb b/docs/examples/single_objective_bayes_opt/multi_fidelity_simple.ipynb index 61020eb53..9f9e62b0d 100644 --- 
a/docs/examples/single_objective_bayes_opt/multi_fidelity_simple.ipynb +++ b/docs/examples/single_objective_bayes_opt/multi_fidelity_simple.ipynb @@ -144,7 +144,7 @@ "# pass options to the generator\n", "evaluator = Evaluator(function=test_function)\n", "\n", - "X = Xopt(vocs=vocs, generator=generator, evaluator=evaluator)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "X" ] }, @@ -317,7 +317,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "xopt-dev", "language": "python", "name": "python3" }, @@ -331,7 +331,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/single_objective_bayes_opt/noisy_bo_tutorial.ipynb b/docs/examples/single_objective_bayes_opt/noisy_bo_tutorial.ipynb index 200953426..ebdea5dc6 100644 --- a/docs/examples/single_objective_bayes_opt/noisy_bo_tutorial.ipynb +++ b/docs/examples/single_objective_bayes_opt/noisy_bo_tutorial.ipynb @@ -116,7 +116,7 @@ "evaluator = Evaluator(function=sin_function)\n", "gp_constructor = StandardModelConstructor(use_low_noise_prior=False)\n", "generator = UpperConfidenceBoundGenerator(vocs=vocs, gp_constructor=gp_constructor)\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)" + "X = Xopt(evaluator=evaluator, generator=generator)" ] }, { diff --git a/docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb b/docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb index db3170a75..8d344a0c4 100644 --- a/docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb +++ b/docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb @@ -151,7 +151,7 @@ "generator.gp_constructor.use_low_noise_prior = True\n", "\n", "start_time = time.time()\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)\n", + "X = Xopt(evaluator=evaluator, generator=generator)\n", "X.random_evaluate(2)\n", 
"\n", "for _ in trange(N_STEPS):\n", diff --git a/docs/examples/single_objective_bayes_opt/upper_confidence_bound.ipynb b/docs/examples/single_objective_bayes_opt/upper_confidence_bound.ipynb index cd1e6b50a..cee6b16f1 100644 --- a/docs/examples/single_objective_bayes_opt/upper_confidence_bound.ipynb +++ b/docs/examples/single_objective_bayes_opt/upper_confidence_bound.ipynb @@ -41,15 +41,15 @@ "generator:\n", " name: upper_confidence_bound\n", " beta: 0.1\n", - "\n", + " vocs:\n", + " variables:\n", + " x1: [0, 6.28]\n", + " objectives:\n", + " y1: 'MINIMIZE'\n", "evaluator:\n", " function: xopt.resources.test_functions.sinusoid_1d.evaluate_sinusoid\n", "\n", - "vocs:\n", - " variables:\n", - " x1: [0, 6.28]\n", - " objectives:\n", - " y1: 'MINIMIZE'\n", + "\n", "\"\"\"" ] }, diff --git a/docs/examples/trust_region_bo/turbo_basics.ipynb b/docs/examples/trust_region_bo/turbo_basics.ipynb index c550a79b5..4d8f40987 100644 --- a/docs/examples/trust_region_bo/turbo_basics.ipynb +++ b/docs/examples/trust_region_bo/turbo_basics.ipynb @@ -214,7 +214,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/trust_region_bo/turbo_optimize.ipynb b/docs/examples/trust_region_bo/turbo_optimize.ipynb index 1cbb4b1c9..44fbfc587 100644 --- a/docs/examples/trust_region_bo/turbo_optimize.ipynb +++ b/docs/examples/trust_region_bo/turbo_optimize.ipynb @@ -110,7 +110,7 @@ "generator = ExpectedImprovementGenerator(vocs=vocs, turbo_controller=\"optimize\")\n", "generator.gp_constructor.use_low_noise_prior = True\n", "\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)" + "X = Xopt(evaluator=evaluator, generator=generator)" ] }, { @@ -353,7 +353,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/examples/trust_region_bo/turbo_safety.ipynb 
b/docs/examples/trust_region_bo/turbo_safety.ipynb index e15b4ffb3..dab1b60a0 100644 --- a/docs/examples/trust_region_bo/turbo_safety.ipynb +++ b/docs/examples/trust_region_bo/turbo_safety.ipynb @@ -106,7 +106,7 @@ "generator.turbo_controller.length = (\n", " 0.05 # set the initial trust region length scale to 5% of the range\n", ")\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)" + "X = Xopt(evaluator=evaluator, generator=generator)" ] }, { From ab1dc77dd19953393e7f2d533a4b5142933b1646 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 14:19:16 -0600 Subject: [PATCH 06/16] linting --- docs/examples/basic/xopt_evaluator.ipynb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/examples/basic/xopt_evaluator.ipynb b/docs/examples/basic/xopt_evaluator.ipynb index eb132ceb0..e725d46a6 100644 --- a/docs/examples/basic/xopt_evaluator.ipynb +++ b/docs/examples/basic/xopt_evaluator.ipynb @@ -318,7 +318,8 @@ "outputs": [], "source": [ "X = Xopt(\n", - " generator=RandomGenerator(vocs=vocs), evaluator=Evaluator(function=f),\n", + " generator=RandomGenerator(vocs=vocs),\n", + " evaluator=Evaluator(function=f),\n", ")\n", "X.strict = False\n", "\n", From 9caad9d31ceadc058eaaf2ab3e753fdcf5529b5a Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 14:21:23 -0600 Subject: [PATCH 07/16] Update test_mpi.py --- xopt/tests/test_mpi.py | 49 +++++++++++++++++++++--------------------- 1 file changed, 24 insertions(+), 25 deletions(-) diff --git a/xopt/tests/test_mpi.py b/xopt/tests/test_mpi.py index e720c8091..4de7830b1 100644 --- a/xopt/tests/test_mpi.py +++ b/xopt/tests/test_mpi.py @@ -31,16 +31,15 @@ def test_mpi(self): generator: name: random - - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - constants: {a: dummy_constant} + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: 
{y1: MINIMIZE, y2: MINIMIZE} + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + constants: {a: dummy_constant} """ @@ -68,22 +67,22 @@ def test_with_cnsga(self): generator: name: cnsga population_size: 64 + vocs: + variables: + x1: [0, 3.14159] + x2: [0, 3.14159] + objectives: {y1: MINIMIZE, y2: MINIMIZE} + constraints: + c1: [GREATER_THAN, 0] + c2: [LESS_THAN, 0.5] + constants: {a: dummy_constant} + + evaluator: + function: xopt.resources.test_functions.tnk.evaluate_TNK + function_kwargs: + sleep: 0 + random_sleep: 0.1 - evaluator: - function: xopt.resources.test_functions.tnk.evaluate_TNK - function_kwargs: - sleep: 0 - random_sleep: 0.1 - - vocs: - variables: - x1: [0, 3.14159] - x2: [0, 3.14159] - objectives: {y1: MINIMIZE, y2: MINIMIZE} - constraints: - c1: [GREATER_THAN, 0] - c2: [LESS_THAN, 0.5] - constants: {a: dummy_constant} """ # run batched mode From 3e92348a627bbca3220cb53c1d780a46c5e23fac Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 14:34:08 -0600 Subject: [PATCH 08/16] update examples/mpi test --- .../basic/checkpointing_and_restarts.ipynb | 19 ++-- docs/examples/ga/cnsga_tnk.ipynb | 38 +++---- docs/examples/ga/nsga2/nsga2_python.ipynb | 2 +- .../nsga2_from_checkpoint.yml | 98 ++++++++--------- .../assets/yaml_runner_example/nsga2_zdt3.yml | 100 +++++++++--------- docs/examples/scipy/latin_hypercube.ipynb | 2 +- docs/examples/sequential/rcds.ipynb | 15 +-- .../constrained_bo_tutorial.ipynb | 2 +- .../fixed_features.ipynb | 2 +- .../time_dependent_bo.ipynb | 2 +- xopt/tests/test_mpi.py | 10 +- 11 files changed, 145 insertions(+), 145 deletions(-) diff --git a/docs/examples/basic/checkpointing_and_restarts.ipynb b/docs/examples/basic/checkpointing_and_restarts.ipynb index dd76ad6bc..30750324f 100644 --- a/docs/examples/basic/checkpointing_and_restarts.ipynb +++ b/docs/examples/basic/checkpointing_and_restarts.ipynb @@ -33,22 +33,21 @@ "dump_file: dump.yml\n", "generator:\n", " name: random\n", + " vocs:\n", + " 
variables:\n", + " x1: [0, 3.14159]\n", + " x2: [0, 3.14159]\n", + " objectives: {y1: MINIMIZE, y2: MINIMIZE}\n", + " constraints:\n", + " c1: [GREATER_THAN, 0]\n", + " c2: [LESS_THAN, 0.5]\n", + " constants: {a: dummy_constant}\n", "\n", "evaluator:\n", " function: xopt.resources.test_functions.tnk.evaluate_TNK\n", " function_kwargs:\n", " a: 999\n", "\n", - "vocs:\n", - " variables:\n", - " x1: [0, 3.14159]\n", - " x2: [0, 3.14159]\n", - " objectives: {y1: MINIMIZE, y2: MINIMIZE}\n", - " constraints:\n", - " c1: [GREATER_THAN, 0]\n", - " c2: [LESS_THAN, 0.5]\n", - " constants: {a: dummy_constant}\n", - "\n", "\"\"\"" ] }, diff --git a/docs/examples/ga/cnsga_tnk.ipynb b/docs/examples/ga/cnsga_tnk.ipynb index c4454f258..7ffb4ca5f 100644 --- a/docs/examples/ga/cnsga_tnk.ipynb +++ b/docs/examples/ga/cnsga_tnk.ipynb @@ -208,21 +208,22 @@ " population_size: 32\n", " population_file: test.csv\n", " output_path: .\n", + " vocs:\n", + " variables:\n", + " x1: [0, 3.14159]\n", + " x2: [0, 3.14159]\n", + " objectives: {y1: MINIMIZE, y2: MINIMIZE}\n", + " constraints:\n", + " c1: [GREATER_THAN, 0]\n", + " c2: [LESS_THAN, 0.5]\n", + " constants: {a: dummy_constant}\n", "\n", "evaluator:\n", " function: xopt.resources.test_functions.tnk.evaluate_TNK\n", " function_kwargs:\n", " raise_probability: 0.1\n", "\n", - "vocs:\n", - " variables:\n", - " x1: [0, 3.14159]\n", - " x2: [0, 3.14159]\n", - " objectives: {y1: MINIMIZE, y2: MINIMIZE}\n", - " constraints:\n", - " c1: [GREATER_THAN, 0]\n", - " c2: [LESS_THAN, 0.5]\n", - " constants: {a: dummy_constant}\n", + "\n", "\n", "\"\"\"\n", "\n", @@ -472,6 +473,15 @@ "generator:\n", " name: cnsga\n", " population_size: 32\n", + " vocs:\n", + " variables:\n", + " x1: [0, 3.14159]\n", + " x2: [0, 3.14159]\n", + " objectives: {y1: MINIMIZE, y2: MINIMIZE}\n", + " constraints:\n", + " c1: [GREATER_THAN, 0]\n", + " c2: [LESS_THAN, 0.5]\n", + " constants: {a: dummy_constant}\n", "\n", "evaluator:\n", " function: __main__.evaluate_TNK2\n", @@ 
-480,16 +490,6 @@ " vectorized: True\n", " max_workers: 32\n", "\n", - "vocs:\n", - " variables:\n", - " x1: [0, 3.14159]\n", - " x2: [0, 3.14159]\n", - " objectives: {y1: MINIMIZE, y2: MINIMIZE}\n", - " constraints:\n", - " c1: [GREATER_THAN, 0]\n", - " c2: [LESS_THAN, 0.5]\n", - " constants: {a: dummy_constant}\n", - "\n", "\"\"\"\n", "\n", "\n", diff --git a/docs/examples/ga/nsga2/nsga2_python.ipynb b/docs/examples/ga/nsga2/nsga2_python.ipynb index 7c9d6f97e..de580ff77 100644 --- a/docs/examples/ga/nsga2/nsga2_python.ipynb +++ b/docs/examples/ga/nsga2/nsga2_python.ipynb @@ -326,7 +326,7 @@ "restored_generator = NSGA2Generator(checkpoint_file=last_checkpoint)\n", "\n", "# Demonstrate using the generator after loading (starting optimization from its last saved point)\n", - "restored_xopt = Xopt(generator=restored_generator, evaluator=ev, vocs=prob_vocs)\n", + "restored_xopt = Xopt(generator=restored_generator, evaluator=ev)\n", "for _ in range(32):\n", " restored_xopt.step()\n", "print(f\"Further optimization: {restored_generator}\")\n", diff --git a/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_from_checkpoint.yml b/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_from_checkpoint.yml index 1b57b895a..7e62f57bf 100644 --- a/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_from_checkpoint.yml +++ b/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_from_checkpoint.yml @@ -6,6 +6,55 @@ generator: name: nsga2 # Use `NSGA2Generator` checkpoint_file: nsga2_output/checkpoints/20250805_065102_1.txt # Path to the checkoint to start from output_dir: nsga2_from_checkpoint_output # Where data will be output, this overrides checkpoint settings + # Define the variables, objectives and constraints + vocs: + # Each decision variable with its lower and upper bound + variables: + x1: [0, 1] + x2: [0, 1] + x3: [0, 1] + x4: [0, 1] + x5: [0, 1] + x6: [0, 1] + x7: [0, 1] + x8: [0, 1] + x9: [0, 1] 
+ x10: [0, 1] + x11: [0, 1] + x12: [0, 1] + x13: [0, 1] + x14: [0, 1] + x15: [0, 1] + x16: [0, 1] + x17: [0, 1] + x18: [0, 1] + x19: [0, 1] + x20: [0, 1] + x21: [0, 1] + x22: [0, 1] + x23: [0, 1] + x24: [0, 1] + x25: [0, 1] + x26: [0, 1] + x27: [0, 1] + x28: [0, 1] + x29: [0, 1] + x30: [0, 1] + + # Name of the objectives and which direction they are (MINIMIZE or MAXIMIZE) + objectives: + f1: MINIMIZE + f2: MINIMIZE + + # We don't have constraints, but an example of what their definition looks like is included below + # constraints: + # g1: ["LESS_THAN", 0] + # g2: ["GREATER_THAN", 0] + + # Constants may also be passed, these are used within the evaluation function + # constants: + # const1: 0.1 + # const2: "path/to/template/file.txt" evaluator: # Xopt will import the module `eval_fun` and use the function `eval_fun` in it @@ -21,52 +70,3 @@ evaluator: vectorized: true max_workers: 50 -# Define the variables, objectives and constraints -vocs: - # Each decision variable with its lower and upper bound - variables: - x1: [0, 1] - x2: [0, 1] - x3: [0, 1] - x4: [0, 1] - x5: [0, 1] - x6: [0, 1] - x7: [0, 1] - x8: [0, 1] - x9: [0, 1] - x10: [0, 1] - x11: [0, 1] - x12: [0, 1] - x13: [0, 1] - x14: [0, 1] - x15: [0, 1] - x16: [0, 1] - x17: [0, 1] - x18: [0, 1] - x19: [0, 1] - x20: [0, 1] - x21: [0, 1] - x22: [0, 1] - x23: [0, 1] - x24: [0, 1] - x25: [0, 1] - x26: [0, 1] - x27: [0, 1] - x28: [0, 1] - x29: [0, 1] - x30: [0, 1] - - # Name of the objectives and which direction they are (MINIMIZE or MAXIMIZE) - objectives: - f1: MINIMIZE - f2: MINIMIZE - - # We don't have constraints, but an example of what their definition looks like is included below - # constraints: - # g1: ["LESS_THAN", 0] - # g2: ["GREATER_THAN", 0] - - # Constants may also be passed, these are used within the evaluation function - # constants: - # const1: 0.1 - # const2: "path/to/template/file.txt" diff --git a/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_zdt3.yml 
b/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_zdt3.yml index ed8eea10f..10d49377e 100644 --- a/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_zdt3.yml +++ b/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_zdt3.yml @@ -6,6 +6,56 @@ generator: name: nsga2 # Use `NSGA2Generator` population_size: 50 # Number of individuals in each population output_dir: nsga2_output # Where data will be output, remove to not output anything + # Define the variables, objectives and constraints + vocs: + # Each decision variable with its lower and upper bound + variables: + x1: [0, 1] + x2: [0, 1] + x3: [0, 1] + x4: [0, 1] + x5: [0, 1] + x6: [0, 1] + x7: [0, 1] + x8: [0, 1] + x9: [0, 1] + x10: [0, 1] + x11: [0, 1] + x12: [0, 1] + x13: [0, 1] + x14: [0, 1] + x15: [0, 1] + x16: [0, 1] + x17: [0, 1] + x18: [0, 1] + x19: [0, 1] + x20: [0, 1] + x21: [0, 1] + x22: [0, 1] + x23: [0, 1] + x24: [0, 1] + x25: [0, 1] + x26: [0, 1] + x27: [0, 1] + x28: [0, 1] + x29: [0, 1] + x30: [0, 1] + + # Name of the objectives and which direction they are (MINIMIZE or MAXIMIZE) + objectives: + f1: MINIMIZE + f2: MINIMIZE + + # We don't have constraints, but an example of what their definition looks like is included below + # constraints: + # g1: ["LESS_THAN", 0] + # g2: ["GREATER_THAN", 0] + + # Constants may also be passed, these are used within the evaluation function + # constants: + # const1: 0.1 + # const2: "path/to/template/file.txt" + evaluator: # Xopt will import the module `eval_fun` and use the function `eval_fun` in it @@ -20,53 +70,3 @@ evaluator: # to the population size so that a full generation is evaluated at once. 
vectorized: true max_workers: 50 - -# Define the variables, objectives and constraints -vocs: - # Each decision variable with its lower and upper bound - variables: - x1: [0, 1] - x2: [0, 1] - x3: [0, 1] - x4: [0, 1] - x5: [0, 1] - x6: [0, 1] - x7: [0, 1] - x8: [0, 1] - x9: [0, 1] - x10: [0, 1] - x11: [0, 1] - x12: [0, 1] - x13: [0, 1] - x14: [0, 1] - x15: [0, 1] - x16: [0, 1] - x17: [0, 1] - x18: [0, 1] - x19: [0, 1] - x20: [0, 1] - x21: [0, 1] - x22: [0, 1] - x23: [0, 1] - x24: [0, 1] - x25: [0, 1] - x26: [0, 1] - x27: [0, 1] - x28: [0, 1] - x29: [0, 1] - x30: [0, 1] - - # Name of the objectives and which direction they are (MINIMIZE or MAXIMIZE) - objectives: - f1: MINIMIZE - f2: MINIMIZE - - # We don't have constraints, but an example of what their definition looks like is included below - # constraints: - # g1: ["LESS_THAN", 0] - # g2: ["GREATER_THAN", 0] - - # Constants may also be passed, these are used within the evaluation function - # constants: - # const1: 0.1 - # const2: "path/to/template/file.txt" diff --git a/docs/examples/scipy/latin_hypercube.ipynb b/docs/examples/scipy/latin_hypercube.ipynb index ed79e0ef3..6de74fb92 100644 --- a/docs/examples/scipy/latin_hypercube.ipynb +++ b/docs/examples/scipy/latin_hypercube.ipynb @@ -152,7 +152,7 @@ "source": [ "n = 16\n", "generator = LatinHypercubeGenerator(vocs=vocs, batch_size=n, scramble=True, seed=0)\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "for _ in range(n):\n", " X.step()\n", "\n", diff --git a/docs/examples/sequential/rcds.ipynb b/docs/examples/sequential/rcds.ipynb index 38f573b65..1c2890b29 100644 --- a/docs/examples/sequential/rcds.ipynb +++ b/docs/examples/sequential/rcds.ipynb @@ -203,14 +203,15 @@ " init_mat: null\n", " noise: 1e-8\n", " step: 0.01\n", + " vocs:\n", + " variables:\n", + " p0: [1, 3]\n", + " p1: [-4, 2]\n", + " objectives:\n", + " f: MAXIMIZE\n", "evaluator:\n", " function: 
__main__.f_test\n", - "vocs:\n", - " variables:\n", - " p0: [1, 3]\n", - " p1: [-4, 2]\n", - " objectives:\n", - " f: MAXIMIZE\n", + "\n", "\"\"\"\n", "\n", "X = Xopt.from_yaml(YAML)\n", @@ -393,7 +394,7 @@ }, "outputs": [], "source": [ - "X = Xopt(vocs=vocs, evaluator=evaluator, generator=generator)" + "X = Xopt(evaluator=evaluator, generator=generator)" ] }, { diff --git a/docs/examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb b/docs/examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb index a7c34b31e..db561c2e8 100644 --- a/docs/examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb +++ b/docs/examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb @@ -266,7 +266,7 @@ "generator_ucb.gp_constructor.use_low_noise_prior = True\n", "\n", "# Create new Xopt object for UCB\n", - "X_ucb = Xopt(evaluator=evaluator, generator=generator_ucb, vocs=vocs)\n", + "X_ucb = Xopt(evaluator=evaluator, generator=generator_ucb)\n", "\n", "print(\"UCB Generator configuration:\")\n", "print(f\"Shift parameter: {generator_ucb.shift}\")\n", diff --git a/docs/examples/single_objective_bayes_opt/fixed_features.ipynb b/docs/examples/single_objective_bayes_opt/fixed_features.ipynb index 906abf9ea..13c96787b 100644 --- a/docs/examples/single_objective_bayes_opt/fixed_features.ipynb +++ b/docs/examples/single_objective_bayes_opt/fixed_features.ipynb @@ -245,7 +245,7 @@ "\n", "evaluator = Evaluator(function=evaluate_rosenbrock)\n", "\n", - "X2 = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X2 = Xopt(generator=generator, evaluator=evaluator)\n", "X2.add_data(X.data)" ] }, diff --git a/docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb b/docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb index 8d344a0c4..78b28a900 100644 --- a/docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb +++ b/docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb @@ -313,7 +313,7 @@ "\n", "start_time = 
time.time()\n", "\n", - "X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)\n", + "X = Xopt(evaluator=evaluator, generator=generator)\n", "X.random_evaluate(2)\n", "\n", "for i in trange(N_STEPS):\n", diff --git a/xopt/tests/test_mpi.py b/xopt/tests/test_mpi.py index 4de7830b1..8fb6c8cf1 100644 --- a/xopt/tests/test_mpi.py +++ b/xopt/tests/test_mpi.py @@ -77,11 +77,11 @@ def test_with_cnsga(self): c2: [LESS_THAN, 0.5] constants: {a: dummy_constant} - evaluator: - function: xopt.resources.test_functions.tnk.evaluate_TNK - function_kwargs: - sleep: 0 - random_sleep: 0.1 + evaluator: + function: xopt.resources.test_functions.tnk.evaluate_TNK + function_kwargs: + sleep: 0 + random_sleep: 0.1 """ From 6c2abf0260a0782e52683520ca1dd7e29da4a2b4 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 14:36:13 -0600 Subject: [PATCH 09/16] linting --- .../assets/yaml_runner_example/nsga2_from_checkpoint.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_from_checkpoint.yml b/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_from_checkpoint.yml index 7e62f57bf..a6a70e7cf 100644 --- a/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_from_checkpoint.yml +++ b/docs/examples/ga/nsga2/yaml_interface/assets/yaml_runner_example/nsga2_from_checkpoint.yml @@ -69,4 +69,3 @@ evaluator: # to the population size so that a full generation is evaluated at once. 
vectorized: true max_workers: 50 - From 7ec0d012b4f56c58015ac4bedd2435e061b311b9 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 14:44:32 -0600 Subject: [PATCH 10/16] fix examples --- docs/examples/ga/nsga2/nsga2_python.ipynb | 1 - docs/examples/scipy/latin_hypercube.ipynb | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/examples/ga/nsga2/nsga2_python.ipynb b/docs/examples/ga/nsga2/nsga2_python.ipynb index de580ff77..08d5ee2ac 100644 --- a/docs/examples/ga/nsga2/nsga2_python.ipynb +++ b/docs/examples/ga/nsga2/nsga2_python.ipynb @@ -265,7 +265,6 @@ "my_xoptX = Xopt(\n", " generator=NSGA2Generator(vocs=prob_vocs, output_dir=output_dir),\n", " evaluator=ev,\n", - " vocs=prob_vocs,\n", ")\n", "for _ in range(32):\n", " my_xopt.step()\n", diff --git a/docs/examples/scipy/latin_hypercube.ipynb b/docs/examples/scipy/latin_hypercube.ipynb index 6de74fb92..0f9eb4666 100644 --- a/docs/examples/scipy/latin_hypercube.ipynb +++ b/docs/examples/scipy/latin_hypercube.ipynb @@ -117,7 +117,7 @@ "source": [ "n = 16\n", "generator = LatinHypercubeGenerator(vocs=vocs, batch_size=n, scramble=False, seed=0)\n", - "X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)\n", + "X = Xopt(generator=generator, evaluator=evaluator)\n", "for _ in range(n):\n", " X.step()\n", "\n", From e523cd11dd00a7892c70298553ea68ecb89f1c87 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 14:54:19 -0600 Subject: [PATCH 11/16] add in verbose error message --- xopt/base.py | 6 ++++++ xopt/tests/test_xopt.py | 10 ++++++++++ 2 files changed, 16 insertions(+) diff --git a/xopt/base.py b/xopt/base.py index 4aac5633c..d0df7c64d 100644 --- a/xopt/base.py +++ b/xopt/base.py @@ -139,6 +139,12 @@ def validate_model(cls, data: Any): # make a copy of the generator / vocs objects to avoid modifying the original data["generator"] = deepcopy(data["generator"]) + # raise a more verbose vocs error for Xopt 3.0 + if "vocs" in data.keys(): + raise ValueError( + 
"As of Xopt 3.0, VOCS is no longer passed to the Xopt object, it is only specified in the generator." + ) + return data @field_validator("evaluator", mode="before") diff --git a/xopt/tests/test_xopt.py b/xopt/tests/test_xopt.py index 49b185acf..a49bbfa17 100644 --- a/xopt/tests/test_xopt.py +++ b/xopt/tests/test_xopt.py @@ -506,6 +506,16 @@ def test_copying_generators(self): assert X.generator is not X2.generator + def test_vocs_error(self): + evaluator = Evaluator(function=xtest_callable) + + with pytest.raises(ValueError): + Xopt( + generator=RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)), + evaluator=evaluator, + vocs=TEST_VOCS_BASE, + ) + @pytest.fixture(scope="module", autouse=True) def clean_up(self): yield From e12e69fb60c9d62ce972879e225759eb32f6ba60 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Mon, 9 Feb 2026 15:12:29 -0600 Subject: [PATCH 12/16] Update test_stopping_condition.py --- xopt/tests/test_stopping_condition.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/xopt/tests/test_stopping_condition.py b/xopt/tests/test_stopping_condition.py index 9d31fab5f..58ab64e8c 100644 --- a/xopt/tests/test_stopping_condition.py +++ b/xopt/tests/test_stopping_condition.py @@ -492,7 +492,6 @@ def test_max_evaluations_backward_compatibility_python( # Initialize Xopt with old max_evaluations parameter X = Xopt( - vocs=simple_vocs, evaluator=evaluator, generator=generator, max_evaluations=5, @@ -508,14 +507,15 @@ def test_max_evaluations_backward_compatibility_yaml( ): """Test backward compatibility: max_evaluations from YAML creates MaxEvaluationsCondition.""" yaml_config = """ -vocs: - variables: - x1: [0.0, 1.0] - x2: [0.0, 1.0] - objectives: - f1: EXPLORE + generator: name: latin_hypercube + vocs: + variables: + x1: [0.0, 1.0] + x2: [0.0, 1.0] + objectives: + f1: EXPLORE evaluator: function: __test_function__ max_evaluations: 3 @@ -539,7 +539,6 @@ def test_max_evaluations_and_stopping_condition_raises_error( with 
pytest.raises(ValueError, match="Cannot specify both"):
             Xopt(
-                vocs=simple_vocs,
                 evaluator=evaluator,
                 generator=generator,
                 max_evaluations=10,

From 8e5b983bafe17254b2277cd017609baa091ca802 Mon Sep 17 00:00:00 2001
From: Ryan Roussel
Date: Mon, 9 Feb 2026 15:52:44 -0600
Subject: [PATCH 13/16] update prior mean docstring

---
 xopt/generators/bayesian/models/prior_mean.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/xopt/generators/bayesian/models/prior_mean.py b/xopt/generators/bayesian/models/prior_mean.py
index db0d8fe83..fc3697211 100644
--- a/xopt/generators/bayesian/models/prior_mean.py
+++ b/xopt/generators/bayesian/models/prior_mean.py
@@ -15,6 +15,12 @@ def __init__(
     ):
         """
         Custom prior mean for a GP based on an arbitrary model.
+        Torch model should contain a `forward` method that takes
+        in a tensor of shape `b x n x d` and outputs a tensor of shape `b x n x 1`
+        where `d` corresponds to the number of variables in the input space (usually
+        specified by VOCs) and `b` corresponds to the batch size. The
+        input and output transformers are used to transform the inputs and
+        outputs of the model to the space of the GP. 
Parameters ---------- From c73b576c25ab473c3cbb732981415ea903a65199 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Thu, 12 Feb 2026 09:54:48 -0600 Subject: [PATCH 14/16] linting and test fixes --- .../generators/bayesian/bayesian_generator.py | 2 +- xopt/tests/generators/bayesian/test_mobo.py | 1 - xopt/tests/test_asynch_xopt.py | 24 +------------------ 3 files changed, 2 insertions(+), 25 deletions(-) diff --git a/xopt/generators/bayesian/bayesian_generator.py b/xopt/generators/bayesian/bayesian_generator.py index 38fba8de9..5173e8e9d 100644 --- a/xopt/generators/bayesian/bayesian_generator.py +++ b/xopt/generators/bayesian/bayesian_generator.py @@ -28,7 +28,7 @@ from pydantic_core.core_schema import ValidationInfo from torch import Tensor -from gest_api.vocs import MinimizeObjective, MaximizeObjective, VOCS +from gest_api.vocs import MinimizeObjective, MaximizeObjective from xopt.errors import VOCSError, XoptError, FeasibilityError from xopt.generator import Generator diff --git a/xopt/tests/generators/bayesian/test_mobo.py b/xopt/tests/generators/bayesian/test_mobo.py index 5247762cc..2c7531aea 100644 --- a/xopt/tests/generators/bayesian/test_mobo.py +++ b/xopt/tests/generators/bayesian/test_mobo.py @@ -52,7 +52,6 @@ def test_init(self): with pytest.raises(VOCSError): MOBOGenerator(vocs=bad_vocs, reference_point=TEST_VOCS_REF_POINT) - @pytest.mark.parametrize("use_cuda", cuda_combinations) def test_generate(self, use_cuda): gen = MOBOGenerator(vocs=TEST_VOCS_BASE_MO, reference_point=TEST_VOCS_REF_POINT) diff --git a/xopt/tests/test_asynch_xopt.py b/xopt/tests/test_asynch_xopt.py index a42723dcb..c0eaece1e 100644 --- a/xopt/tests/test_asynch_xopt.py +++ b/xopt/tests/test_asynch_xopt.py @@ -17,7 +17,6 @@ def test_asynch(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) n_steps = 5 for i in range(n_steps): @@ -32,7 +31,6 @@ def test_asynch(self): X2 = AsynchronousXopt( generator=generator, 
evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) n_steps = 5 @@ -78,9 +76,7 @@ def bad_function_sometimes(inval): evaluator = Evaluator(function=bad_function_sometimes) gen = RandomGenerator(vocs=deepcopy(TEST_VOCS_BASE)) - X = AsynchronousXopt( - generator=gen, evaluator=evaluator, vocs=deepcopy(TEST_VOCS_BASE) - ) + X = AsynchronousXopt(generator=gen, evaluator=evaluator) X.strict = False # Submit to the evaluator some new inputs @@ -100,7 +96,6 @@ def test_unique_indices(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Run multiple steps to create potential race conditions @@ -123,7 +118,6 @@ def test_submit_data_different_formats(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Test with DataFrame (within bounds: x1 [0,1], x2 [0,10]) @@ -153,7 +147,6 @@ def test_prepare_input_data(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Test basic input preparation @@ -172,7 +165,6 @@ def test_data_lock_property(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Initially None (or not yet created) @@ -202,7 +194,6 @@ def test_pickle_compatibility(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Add some data @@ -213,7 +204,6 @@ def test_pickle_compatibility(self): X_new = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) X_new.__setstate__(state) @@ -228,7 +218,6 @@ def test_add_data_thread_safety(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Function to add data from multiple threads @@ -271,7 +260,6 @@ def test_step_with_no_futures(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # First step should generate 
max_workers candidates @@ -287,7 +275,6 @@ def test_step_when_done(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) X.is_done = True @@ -311,7 +298,6 @@ def failing_function(inval): X_strict = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), strict=True, ) @@ -319,7 +305,6 @@ def failing_function(inval): X_non_strict = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), strict=False, ) @@ -340,7 +325,6 @@ def test_multiple_runs_for_stochastic_stability(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Run several steps @@ -374,7 +358,6 @@ def vector_function(inputs): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Submit multiple data points @@ -391,7 +374,6 @@ def test_index_collision_fallback(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Add some initial data @@ -425,7 +407,6 @@ def failing_function(inval): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), strict=True, ) @@ -447,7 +428,6 @@ def test_duplicate_index_cleanup(self): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Manually create a DataFrame with duplicate indices to test cleanup @@ -486,7 +466,6 @@ def vector_function(inputs): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), ) # Submit multiple data points @@ -513,7 +492,6 @@ def ingest(self, data): X = AsynchronousXopt( generator=generator, evaluator=evaluator, - vocs=deepcopy(TEST_VOCS_BASE), strict=False, # Non-strict to allow error handling ) From 2e3f86001a38ea4b382ffe670d8168144234f5f5 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Thu, 12 Feb 2026 11:02:18 -0600 Subject: [PATCH 15/16] 
Update test_stopping_condition.py --- xopt/tests/test_stopping_condition.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/xopt/tests/test_stopping_condition.py b/xopt/tests/test_stopping_condition.py index 9f732d217..35a6cc4b4 100644 --- a/xopt/tests/test_stopping_condition.py +++ b/xopt/tests/test_stopping_condition.py @@ -515,14 +515,14 @@ def test_max_evaluations_backward_compatibility_yaml( def test_stopping_condition_yaml_deserialization(self, simple_vocs, test_function): """Test that a stopping condition is correctly instantiated from YAML.""" yaml_config = """ -vocs: - variables: - x1: [0.0, 1.0] - x2: [0.0, 1.0] - objectives: - f1: EXPLORE generator: name: latin_hypercube + vocs: + variables: + x1: [0.0, 1.0] + x2: [0.0, 1.0] + objectives: + f1: EXPLORE evaluator: function: __test_function__ stopping_condition: From 46810af88bdbef3fb6588b29d3a9e8f2b552a590 Mon Sep 17 00:00:00 2001 From: Ryan Roussel Date: Thu, 12 Feb 2026 11:25:22 -0600 Subject: [PATCH 16/16] fix test --- xopt/tests/test_pydantic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xopt/tests/test_pydantic.py b/xopt/tests/test_pydantic.py index f63f01d28..bcb94d550 100644 --- a/xopt/tests/test_pydantic.py +++ b/xopt/tests/test_pydantic.py @@ -486,7 +486,7 @@ class M(XoptBaseModel): # test torch load in XoptBaseModel torch.save(torch.nn.Linear(2, 2), tmp_path / "model.pt") - M.validate_files(str(tmp_path / "model.pt"), None) + M.validate_files(yaml.safe_load(str(tmp_path / "model.pt"))) def test_remove_none_values():