diff --git a/packages/models-library/src/models_library/api_schemas_webserver/activity.py b/packages/models-library/src/models_library/api_schemas_webserver/activity.py
index ce1683bed78f..95384e574120 100644
--- a/packages/models-library/src/models_library/api_schemas_webserver/activity.py
+++ b/packages/models-library/src/models_library/api_schemas_webserver/activity.py
@@ -18,7 +18,7 @@ class Limits(BaseModel):
 class Activity(BaseModel):
     stats: Stats
     limits: Limits
-    queued: bool | None = None  # TODO: review since it in NOT filled
+    queued: bool | None = None
 
 
 ActivityStatusDict: TypeAlias = dict[NodeID, Activity]
diff --git a/packages/models-library/src/models_library/basic_regex.py b/packages/models-library/src/models_library/basic_regex.py
index 5df967c1128e..6ea5085bf05e 100644
--- a/packages/models-library/src/models_library/basic_regex.py
+++ b/packages/models-library/src/models_library/basic_regex.py
@@ -1,58 +1,57 @@
-"""Regular expressions patterns to build pydantic contrained strings
-
-- Variants of the patterns with 'Named Groups' captured are suffixed with NG_RE
-
-SEE tests_basic_regex.py for examples
-"""
-
-# TODO: for every pattern we should have a formatter function
-# NOTE: some sites to manualy check ideas
-# https://regex101.com/
-# https://pythex.org/
-#
-
-# Universally unique Identifier. Pattern taken from https://stackoverflow.com/questions/136505/searching-for-uuids-in-text-with-regex
 import re
 from typing import Final
 
 UUID_RE_BASE = (
+    # Universally unique Identifier. Pattern taken from https://stackoverflow.com/questions/136505/searching-for-uuids-in-text-with-regex
     r"[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}"
 )
 UUID_RE = rf"^{UUID_RE_BASE}$"
 
-# Formatted timestamps with date and time
-DATE_RE = r"\d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z"
+DATE_RE = (
+    # Formatted timestamps with date and time
+    r"\d{4}-(12|11|10|0?[1-9])-(31|30|[0-2]?\d)T(2[0-3]|1\d|0?[0-9])(:(\d|[0-5]\d)){2}(\.\d{3})?Z"
+)
 
-# python-like version
-SIMPLE_VERSION_RE = r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$"
+SIMPLE_VERSION_RE = (
+    # python-like version
+    r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$"
+)
 
 # Semantic version
 # SEE https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
-#
-# with capture groups: cg1 = major, cg2 = minor, cg3 = patch, cg4 = prerelease and cg5 = buildmetadata
-SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS = r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
-# with named groups: major, minor, patch, prerelease and buildmetadata
-SEMANTIC_VERSION_RE_W_NAMED_GROUPS = r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
+
+SEMANTIC_VERSION_RE_W_CAPTURE_GROUPS = (
+    # with capture groups: cg1 = major, cg2 = minor, cg3 = patch, cg4 = prerelease and cg5 = buildmetadata
+    r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
+)
+SEMANTIC_VERSION_RE_W_NAMED_GROUPS = (
+    # with named groups: major, minor, patch, prerelease and buildmetadata
+    r"^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
+)
 
-# Regex to detect whether a string can be used as a variable identifier (see tests)
-# - cannot start with spaces, _ (we only want public) or numbers
-# https://docs.python.org/3/reference/lexical_analysis.html#identifiers
-PUBLIC_VARIABLE_NAME_RE = r"^[^_\W0-9]\w*$"
+PUBLIC_VARIABLE_NAME_RE = (
+    # Regex to detect whether a string can be used as a variable identifier (see tests)
+    # - cannot start with spaces, _ (we only want public) or numbers
+    # https://docs.python.org/3/reference/lexical_analysis.html#identifiers
+    r"^[^_\W0-9]\w*$"
+)
 
 MIME_TYPE_RE = (
     r"([\w\*]*)\/(([\w\-\*]+\.)+)?([\w\-\*]+)(\+([\w\-\.]+))?(; ([\w+-\.=]+))?"
 )
 
-# Storage basic file ID
-SIMCORE_S3_FILE_ID_RE = rf"^(exports\/\d+\/{UUID_RE_BASE}\.zip)|((api|({UUID_RE_BASE}))\/({UUID_RE_BASE})\/(.+)$)"
+SIMCORE_S3_FILE_ID_RE = (
+    # Storage basic file ID
+    rf"^(exports\/\d+\/{UUID_RE_BASE}\.zip)|((api|({UUID_RE_BASE}))\/({UUID_RE_BASE})\/(.+)$)"
+)
 
 SIMCORE_S3_DIRECTORY_ID_RE = rf"^({UUID_RE_BASE})\/({UUID_RE_BASE})\/(.+)\/$"
 
-# S3 - AWS bucket names [https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html]
 S3_BUCKET_NAME_RE = re.compile(
+    # S3 - AWS bucket names [https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html]
     r"^(?!xn--)[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$(?<!-s3alias)"
 )
diff --git a/packages/models-library/src/models_library/function_services_catalog/_utils.py b/packages/models-library/src/models_library/function_services_catalog/_utils.py
--- a/packages/models-library/src/models_library/function_services_catalog/_utils.py
+++ b/packages/models-library/src/models_library/function_services_catalog/_utils.py
@@ -17,5 +20,5 @@ def _create_fake_thumbnail_url(label: str) -> str:
     return f"https://fakeimg.pl/100x100/ff0000%2C128/000%2C255/?text={quote(label)}"
 
 
-class ServiceNotFound(KeyError):
+class ServiceNotFoundError(KeyError):
     pass
@@ -35,6 +38,135 @@ class _Record:
     is_under_development: bool = False
 
 
+_TYPE_MAPPING = {
+    "number": float,
+    "integer": int,
+    "boolean": bool,
+    "string": str,
+    "data:*/*": str,
+    "ref_contentSchema": Any,
+}
+
+
+def _service_type_to_python_type(property_type: str) -> Any:
+    """Convert service property type to Python type"""
+    # Fast lookup for exact matches
+    if mapped_type := _TYPE_MAPPING.get(property_type):
+        return mapped_type
+
+    # Handle data: prefix patterns
+    if property_type.startswith("data:"):
+        return str
+
+    # Default to Any for unknown types
+    return Any
+
+
+def validate_callable_signature(
+    implementation: Callable | None,
+    service_inputs: dict[str, ServiceInput] | None,
+    service_outputs: dict[str, ServiceOutput] | None,
+) -> None:
+    """
+    Validates that the callable signature matches the service inputs and outputs.
+ + Args: + implementation: The callable to validate + service_inputs: Dictionary of service input specifications + service_outputs: Dictionary of service output specifications + + Raises: + ValueError: If signature doesn't match the expected inputs/outputs + TypeError: If types are incompatible + """ + if implementation is None: + return + + sig = inspect.signature(implementation) + service_inputs = service_inputs or {} + service_outputs = service_outputs or {} + + # Validate input parameters + sig_params = list(sig.parameters.values()) + expected_input_count = len(service_inputs) + actual_input_count = len( + [ + p + for p in sig_params + if p.kind in (p.POSITIONAL_ONLY, p.POSITIONAL_OR_KEYWORD) + ] + ) + + if actual_input_count != expected_input_count: + msg = f"Function has {actual_input_count} parameters but service expects {expected_input_count} inputs" + raise ValueError(msg) + + # Check parameter types if type hints are available + for i, (input_key, input_spec) in enumerate(service_inputs.items()): + assert input_key # nosec + if i < len(sig_params): + param = sig_params[i] + expected_type = _service_type_to_python_type(input_spec.property_type) + + if param.annotation != inspect.Parameter.empty and expected_type != Any: + param_type = param.annotation + # Handle Union types and optional parameters + if get_origin(param_type) is not None: + param_types = get_args(param_type) + if expected_type not in param_types: + _logger.warning( + "Parameter '%s' type hint %s doesn't match expected service input type %s", + param.name, + param_type, + expected_type, + ) + elif param_type != expected_type: + _logger.warning( + "Parameter '%s' type hint %s doesn't match expected service input type %s", + param.name, + param_type, + expected_type, + ) + + # Validate return type + if service_outputs: + return_annotation = sig.return_annotation + if return_annotation != inspect.Signature.empty: + output_count = len(service_outputs) + + # If single output, return type should match directly + if output_count == 1: + output_spec = next(iter(service_outputs.values())) + expected_return_type = _service_type_to_python_type( + output_spec.property_type + ) + + if return_annotation not in {Any, expected_return_type}: + # Check if it's a Union type containing the expected type + if get_origin(return_annotation) is not None: + return_types = get_args(return_annotation) + if expected_return_type not in return_types: + _logger.warning( + "Return type %s doesn't match expected service output type %s", + return_annotation, + expected_return_type, + ) + else: + _logger.warning( + "Return type %s doesn't match expected service output type %s", + return_annotation, + expected_return_type, + ) + + # If multiple outputs, expect tuple or dict return type + elif output_count > 1: + if get_origin(return_annotation) not in (tuple, dict): + _logger.warning( + "Multiple outputs expected but return type %s is not tuple or dict", + return_annotation, + ) + + class FunctionServices: """Used to register a collection of function services""" @@ -46,6 +178,7 @@ def add( self, meta: ServiceMetaDataPublished, implementation: Callable | None = None, + *, is_under_development: bool = False, ): """ @@ -53,14 +186,15 @@ def add( """ if not isinstance(meta, ServiceMetaDataPublished): msg = f"Expected ServiceDockerData, got {type(meta)}" - raise ValueError(msg) + raise TypeError(msg) # ensure unique if (meta.key, meta.version) in self._functions: msg = f"{meta.key, meta.version} is already registered" raise ValueError(msg) - # TODO: 
ensure callable signature fits metadata + # Validate callable signature matches metadata + validate_callable_signature(implementation, meta.inputs, meta.outputs) # register self._functions[(meta.key, meta.version)] = _Record( @@ -71,8 +205,10 @@ def add( def extend(self, other: "FunctionServices"): # pylint: disable=protected-access - for f in other._functions.values(): - self.add(f.meta, f.implementation, f.is_under_development) + for f in other._functions.values(): # noqa: SLF001 + self.add( + f.meta, f.implementation, is_under_development=f.is_under_development + ) def _skip_dev(self): skip = True @@ -110,7 +246,7 @@ def get_implementation( func = self._functions[(service_key, service_version)] except KeyError as err: msg = f"{service_key}:{service_version} not found in registry" - raise ServiceNotFound(msg) from err + raise ServiceNotFoundError(msg) from err return func.implementation def get_metadata( @@ -121,7 +257,7 @@ def get_metadata( func = self._functions[(service_key, service_version)] except KeyError as err: msg = f"{service_key}:{service_version} not found in registry" - raise ServiceNotFound(msg) from err + raise ServiceNotFoundError(msg) from err return func.meta def __len__(self): diff --git a/packages/models-library/tests/test_function_services_catalog.py b/packages/models-library/tests/test_function_services_catalog.py index b5f0c21b0bc6..b103d9b591f1 100644 --- a/packages/models-library/tests/test_function_services_catalog.py +++ b/packages/models-library/tests/test_function_services_catalog.py @@ -11,11 +11,13 @@ FunctionServiceSettings, catalog, ) +from models_library.function_services_catalog._utils import validate_callable_signature from models_library.function_services_catalog.api import ( is_function_service, iter_service_docker_data, ) from models_library.services import ServiceMetaDataPublished +from models_library.services_io import ServiceInput, ServiceOutput @pytest.mark.parametrize( @@ -74,3 +76,159 @@ def test_catalog_registry(monkeypatch: pytest.MonkeyPatch): assert len(prod_services) < len(dev_services) assert prod_services.issubset(dev_services) + + +def test_validate_callable_signature_success(): + """Test that a correctly matching callable passes validation""" + + # Create service inputs/outputs matching _EXAMPLE + service_inputs = { + "input_1": ServiceInput( + label="Input data", + description="Any code, requirements or data file", + property_type="data:*/*", + ) + } + + service_outputs = { + "output_1": ServiceOutput( + label="Output data", + description="All data produced by the script is zipped as output_data.zip", + property_type="data:*/*", + file_to_key_map={"output_data.zip": "output_1"}, + ) + } + + # Define a matching callable + def matching_function(input_data: str) -> str: + return "processed_data" + + # Should not raise any exception + validate_callable_signature(matching_function, service_inputs, service_outputs) + + +def test_validate_callable_signature_parameter_count_mismatch(): + """Test that parameter count mismatch raises ValueError""" + + service_inputs = { + "input_1": ServiceInput( + label="Input 1", + description="First input", + property_type="number", + ), + "input_2": ServiceInput( + label="Input 2", + description="Second input", + property_type="number", + ), + } + + service_outputs = { + "output_1": ServiceOutput( + label="Output", + description="Result", + property_type="number", + ) + } + + # Function with wrong number of parameters + def wrong_param_count(only_one_param: float) -> float: + return only_one_param * 2 + + 
+    with pytest.raises(
+        ValueError, match="Function has 1 parameters but service expects 2 inputs"
+    ):
+        validate_callable_signature(wrong_param_count, service_inputs, service_outputs)
+
+
+def test_validate_callable_signature_multiple_outputs_wrong_return_type():
+    """Test that multiple outputs with non-tuple/dict return type logs warning"""
+
+    service_inputs = {
+        "input_1": ServiceInput(
+            label="Input",
+            description="Input data",
+            property_type="number",
+        )
+    }
+
+    service_outputs = {
+        "output_1": ServiceOutput(
+            label="Output 1",
+            description="First output",
+            property_type="number",
+        ),
+        "output_2": ServiceOutput(
+            label="Output 2",
+            description="Second output",
+            property_type="string",
+        ),
+    }
+
+    # Function returning single value instead of tuple/dict for multiple outputs
+    def wrong_return_type(input_val: float) -> float:  # Should return tuple or dict
+        return input_val * 2
+
+    # Should log a warning (via logging, not the warnings module) but not raise
+    validate_callable_signature(wrong_return_type, service_inputs, service_outputs)
+
+
+def test_validate_callable_signature_with_example_metadata():
+    """Test validation using the example metadata from services_metadata_published.py"""
+    from models_library.services_metadata_published import (
+        _EXAMPLE,
+        _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER,
+    )
+
+    # Create metadata from examples
+    example_meta = ServiceMetaDataPublished.model_validate(_EXAMPLE)
+
+    # Define a matching callable for the example
+    def example_matching_function(input_1: str) -> str:
+        """Function that matches _EXAMPLE metadata"""
+        return "output_data.zip"
+
+    # Should pass validation
+    validate_callable_signature(
+        example_matching_function, example_meta.inputs, example_meta.outputs
+    )
+
+    # Test with boot options example
+    boot_options_meta = ServiceMetaDataPublished.model_validate(
+        _EXAMPLE_W_BOOT_OPTIONS_AND_NO_DISPLAY_ORDER
+    )
+
+    # Should also pass validation (same inputs/outputs structure)
+    validate_callable_signature(
+        example_matching_function, boot_options_meta.inputs, boot_options_meta.outputs
+    )
+
+
+def test_validate_callable_signature_none_implementation():
+    """Test that None implementation is handled gracefully"""
+
+    service_inputs = {
+        "input_1": ServiceInput(
+            label="Input", description="Test", property_type="string"
+        )
+    }
+    service_outputs = {
+        "output_1": ServiceOutput(
+            label="Output", description="Test", property_type="string"
+        )
+    }
+
+    # Should not raise any exception
+    validate_callable_signature(None, service_inputs, service_outputs)
+
+
+def test_validate_callable_signature_empty_inputs_outputs():
+    """Test validation with empty inputs/outputs"""
+
+    def no_param_function() -> None:
+        pass
+
+    # Should pass validation - no inputs expected, no outputs expected
+    validate_callable_signature(no_param_function, {}, {})
+    validate_callable_signature(no_param_function, None, None)
diff --git a/packages/postgres-database/src/simcore_postgres_database/cli.py b/packages/postgres-database/src/simcore_postgres_database/cli.py
index 5fa3cf22025e..1b649fb6050a 100644
--- a/packages/postgres-database/src/simcore_postgres_database/cli.py
+++ b/packages/postgres-database/src/simcore_postgres_database/cli.py
@@ -1,6 +1,4 @@
-""" command line interface for migration
-
-"""
+"""command line interface for migration"""
 
 # pylint: disable=wildcard-import
 # pylint: disable=unused-wildcard-import
@@ -70,7 +68,6 @@ def main():
 def discover(**cli_inputs) -> dict | None:
     """Discovers databases and caches configs in 
~/.simcore_postgres_database.json (except if --no-cache)""" # NOTE: Do not add defaults to user, password so we get a chance to ping urls - # TODO: if multiple candidates online, then query user to select click.echo("Discovering database ...") cli_cfg = {key: value for key, value in cli_inputs.items() if value is not None} diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/c78091da9a97_removed_projects_to_user_table.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/c78091da9a97_removed_projects_to_user_table.py new file mode 100644 index 000000000000..0b39e636228f --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/c78091da9a97_removed_projects_to_user_table.py @@ -0,0 +1,48 @@ +"""removed projects_to_user table + +Revision ID: c78091da9a97 +Revises: 9dddb16914a4 +Create Date: 2025-10-06 17:20:05.083435+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "c78091da9a97" +down_revision = "9dddb16914a4" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("user_to_projects") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "user_to_projects", + sa.Column("id", sa.BIGINT(), autoincrement=True, nullable=False), + sa.Column("user_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("project_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["project_id"], + ["projects.id"], + name=op.f("fk_user_to_projects_id_projects"), + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["users.id"], + name=op.f("fk_user_to_projects_id_users"), + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name=op.f("user_to_projects_pkey")), + ) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/services_consume_filetypes.py b/packages/postgres-database/src/simcore_postgres_database/models/services_consume_filetypes.py index 65c6c8546b38..b759178697b2 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/services_consume_filetypes.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/services_consume_filetypes.py @@ -1,23 +1,16 @@ """ - Establishes which services can consume a given filetype +Establishes which services can consume a given filetype - The relation is N-N because - - a service could handle one or more filetypes and - - one filetype could be handled by one or more services +The relation is N-N because +- a service could handle one or more filetypes and +- one filetype could be handled by one or more services """ + import sqlalchemy as sa from ._common import RefActions from .base import metadata -# -# TODO: This information SHALL be defined in service metadata upon publication -# and the catalog service, using e.g. a background task, -# can automatically fill this table with services that elligable (e.g. shared with everybody) -# to consume given filetypes. 
Notice also that service "matching" will also be determined in a near -# future by more complex metadata -# - services_consume_filetypes = sa.Table( "services_consume_filetypes", metadata, diff --git a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py deleted file mode 100644 index 4a66e0be6118..000000000000 --- a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py +++ /dev/null @@ -1,38 +0,0 @@ -import sqlalchemy as sa - -from ._common import RefActions -from .base import metadata -from .projects import projects -from .users import users - -# DEPRECATED!!!!!!!!!!!!!! DO NOT USE!!!!!!! -user_to_projects = sa.Table( - "user_to_projects", - metadata, - sa.Column("id", sa.BigInteger, nullable=False, primary_key=True), - sa.Column( - "user_id", - sa.BigInteger, - sa.ForeignKey( - users.c.id, - name="fk_user_to_projects_id_users", - ondelete=RefActions.CASCADE, - onupdate=RefActions.CASCADE, - ), - nullable=False, - ), - sa.Column( - "project_id", - sa.BigInteger, - sa.ForeignKey( - projects.c.id, - name="fk_user_to_projects_id_projects", - ondelete=RefActions.CASCADE, - onupdate=RefActions.CASCADE, - ), - nullable=False, - ), - # TODO: do not ondelete=cascase for project_id or it will delete SHARED PROJECT - # add instead sa.UniqueConstraint('user_id', 'project_id', name='user_project_uniqueness'), - # -) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/constants.py b/packages/pytest-simcore/src/pytest_simcore/helpers/constants.py index 5d517b9a0710..0076b38fe3b8 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/constants.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/constants.py @@ -1,10 +1,7 @@ -# -# TODO: for the moment, it has to be py3.6 compatible. Please do not use e.g. Final +from typing import Final # time - -MINUTE: int = 60 # secs - +MINUTE: Final[int] = 60 # in seconds # string templates -HEADER_STR: str = "{:-^100}\n" +HEADER_STR: Final[str] = "{:-^100}\n" diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/typing_docker.py b/packages/pytest-simcore/src/pytest_simcore/helpers/typing_docker.py index aaf1811a437b..8b2f5c7b1507 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/typing_docker.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/typing_docker.py @@ -55,13 +55,6 @@ class StatusDict(TypedDict): Timestamp: str State: str Message: str - - # TODO: represent each state of StatusDict as - # class TaskDict: - # Status: Union[ StatusDict0, StatusDict1, etc]? - # e.g. 
in StatusDict1 we add - # ContainerStatus: - PortStatus: dict diff --git a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py index d8cd056c1154..29c348eeb43f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py +++ b/packages/pytest-simcore/src/pytest_simcore/httpx_calls_capture.py @@ -134,8 +134,6 @@ def _wrapper(*args, **kwargs): spy: MockType = mocker.patch(spy_target, side_effect=_wrapper) spy.httpx_calls_capture_path = spy_httpx_calls_capture_path - # TODO: respx.api.stop(clear=False, reset=False) - return spy return None diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py index b885b62232fb..7f53de138947 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py @@ -24,19 +24,18 @@ def webserver_endpoint( return URL(f"http://{endpoint}") -@pytest.fixture(scope="function") +@pytest.fixture async def webserver_service(webserver_endpoint: URL, docker_stack: dict) -> URL: await wait_till_webserver_responsive(webserver_endpoint) + return webserver_endpoint - yield webserver_endpoint - -# TODO: this can be used by ANY of the simcore services! @tenacity.retry(**ServiceRetryPolicyUponInitialization().kwargs) async def wait_till_webserver_responsive(webserver_endpoint: URL): - async with aiohttp.ClientSession() as session: - async with session.get(webserver_endpoint.with_path("/v0/")) as resp: - # NOTE: Health-check endpoint require only a - # status code 200 (see e.g. services/web/server/docker/healthcheck.py) - # regardless of the payload content - assert resp.status == 200 + async with aiohttp.ClientSession() as session, session.get( + webserver_endpoint.with_path("/v0/") + ) as resp: + # NOTE: Health-check endpoint require only a + # status code 200 (see e.g. services/web/server/docker/healthcheck.py) + # regardless of the payload content + assert resp.status == 200 diff --git a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py index 881e91731854..81e3dbcec3ec 100644 --- a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py @@ -43,16 +43,17 @@ async def traefik_service( return traefik_endpoints -# TODO: this can be used by ANY of the simcore services! 
@tenacity.retry(**ServiceRetryPolicyUponInitialization().kwargs) async def wait_till_traefik_responsive(api_endpoint: URL): - async with aiohttp.ClientSession() as session: - async with session.get(api_endpoint.with_path("/api/http/routers")) as resp: - assert resp.status == 200 - data = await resp.json() - for proxied_service in data: - assert "service" in proxied_service - if "webserver" in proxied_service["service"]: - assert proxied_service["status"] == "enabled" - elif "api-server" in proxied_service["service"]: - assert proxied_service["status"] == "enabled" + async with aiohttp.ClientSession() as session, session.get( + api_endpoint.with_path("/api/http/routers") + ) as resp: + assert resp.status == 200 + data = await resp.json() + for proxied_service in data: + assert "service" in proxied_service + if ( + "webserver" in proxied_service["service"] + or "api-server" in proxied_service["service"] + ): + assert proxied_service["status"] == "enabled" diff --git a/packages/service-integration/src/service_integration/oci_image_spec.py b/packages/service-integration/src/service_integration/oci_image_spec.py index 84f779150ff4..cf9d980f7600 100644 --- a/packages/service-integration/src/service_integration/oci_image_spec.py +++ b/packages/service-integration/src/service_integration/oci_image_spec.py @@ -1,4 +1,4 @@ -""" Support for Open Container Initiative (OCI) +"""Support for Open Container Initiative (OCI) SEE https://opencontainers.org SEE https://github.com/opencontainers @@ -45,7 +45,6 @@ def _underscore_as_dot(field_name: str): class OciImageSpecAnnotations(BaseModel): - # TODO: review and polish constraints created: Annotated[ datetime | None, @@ -99,13 +98,12 @@ class OciImageSpecAnnotations(BaseModel): ), ] = None - # SEE https://spdx.dev/spdx-specification-21-web-version/#h.jxpfx0ykyb60 licenses: Annotated[ str, Field( - description="License(s) under which contained software is distributed as an SPDX License Expression.", + description="License(s) under which contained software is distributed as an SPDX License Expression. 
See https://spdx.org/licenses/", ), - ] = "MIT" + ] = "MIT" # https://spdx.org/licenses/MIT.html ref_name: Annotated[ str | None, @@ -131,7 +129,11 @@ class OciImageSpecAnnotations(BaseModel): ] = None model_config = ConfigDict( - alias_generator=_underscore_as_dot, populate_by_name=True, extra="forbid" + alias_generator=_underscore_as_dot, + populate_by_name=True, + extra="forbid", + str_strip_whitespace=True, + str_min_length=1, ) @classmethod diff --git a/packages/service-integration/src/service_integration/osparc_config.py b/packages/service-integration/src/service_integration/osparc_config.py index 9557d0d5ef34..2011022f9ebb 100644 --- a/packages/service-integration/src/service_integration/osparc_config.py +++ b/packages/service-integration/src/service_integration/osparc_config.py @@ -139,7 +139,6 @@ def image_name(self, settings: AppSettings, registry="local") -> str: service_path = self.key if registry in "dockerhub": # dockerhub allows only one-level names -> dot it - # TODO: check thisname is compatible with REGEX service_path = TypeAdapter(ServiceKey).validate_python( service_path.replace("/", ".") ) diff --git a/packages/service-integration/src/service_integration/settings.py b/packages/service-integration/src/service_integration/settings.py index f8b977cc9a4b..efd21d739e3c 100644 --- a/packages/service-integration/src/service_integration/settings.py +++ b/packages/service-integration/src/service_integration/settings.py @@ -27,10 +27,7 @@ class AppSettings(BaseSettings): COMPOSE_VERSION: str = Field( "3.7", description="version of the docker-compose spec" ) + model_config = SettingsConfigDict( env_file_encoding="utf-8", ) - - # TODO: load from ~/.osparc/service-integration.json or env file - # TODO: add access to secrets - # SEE https://pydantic-docs.helpmanual.io/usage/settings/#adding-sources diff --git a/packages/service-library/src/servicelib/aiohttp/aiopg_utils.py b/packages/service-library/src/servicelib/aiohttp/aiopg_utils.py index 23fcbb41f0a3..ff2305223ddb 100644 --- a/packages/service-library/src/servicelib/aiohttp/aiopg_utils.py +++ b/packages/service-library/src/servicelib/aiohttp/aiopg_utils.py @@ -11,10 +11,8 @@ SEE for extra keywords: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS """ -# TODO: Towards implementing https://github.com/ITISFoundation/osparc-simcore/issues/1195 -# TODO: deprecate this module. Move utils into retry_policies, simcore_postgres_database.utils_aiopg - import logging +import warnings from typing import Final import sqlalchemy as sa @@ -34,6 +32,14 @@ APP_AIOPG_ENGINE_KEY: Final = web.AppKey("APP_AIOPG_ENGINE_KEY", Engine) +warnings.warn( + "This module uses aiopg which is deprecated in this repository. " + "Currently using sqlalchemy.ext.asyncio with asyncpg." 
+ "See details of migration in https://github.com/ITISFoundation/osparc-simcore/issues/4529", + DeprecationWarning, + stacklevel=2, +) + async def raise_if_not_responsive(engine: Engine): async with engine.acquire() as conn: @@ -59,7 +65,6 @@ async def is_pg_responsive(engine: Engine, *, raise_if_fails=False) -> bool: def init_pg_tables(dsn: DataSourceName, schema: sa.schema.MetaData): try: # CONS: creates and disposes an engine just to create tables - # TODO: find a way to create all tables with aiopg engine sa_engine = sa.create_engine(dsn.to_uri(with_query=True)) schema.create_all(sa_engine) finally: @@ -86,7 +91,6 @@ def raise_http_unavailable_error(retry_state: RetryCallState): # SEE https://aiopg.readthedocs.io/en/stable/core.html?highlight=Exception#exceptions # SEE http://initd.org/psycopg/docs/module.html#dbapi-exceptions - # TODO: add header with Retry-After https://tools.ietf.org/html/rfc7231#section-7.1.3 resp = web.HTTPServiceUnavailable() # logs diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/files.py b/services/api-server/src/simcore_service_api_server/api/dependencies/files.py deleted file mode 100644 index ae3e1009ef5a..000000000000 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/files.py +++ /dev/null @@ -1,19 +0,0 @@ -from fastapi import Header -from pydantic.types import PositiveInt - -# -# Based on discussion https://github.com/tiangolo/fastapi/issues/362#issuecomment-584104025 -# -# TODO: add heuristics with max file size to config Timeout? -# SEE api/routes/files.py::upload_file -# - -GB = 1024 * 1024 * 1024 -MAX_UPLOAD_SIZE = 1 * GB # TODO: settings? - - -async def valid_content_length( - content_length: PositiveInt = Header(..., lt=MAX_UPLOAD_SIZE) -): - # TODO: use this to replace content_length: Optional[str] = Header(None), - return content_length diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py index 1e3c6d6d0e68..99b6a2fabb29 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py @@ -112,8 +112,6 @@ class JobInputs(BaseModel): # NOTE: this is different from the resource JobInput (TBD) values: KeywordArguments - # TODO: gibt es platz fuer metadata? 
- model_config = ConfigDict( frozen=True, json_schema_extra={ diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 86eae20daf93..ee696c2291be 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -12,12 +12,8 @@ x-tracing-open-telemetry: &tracing_open_telemetry_environs TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} -x-webserver-diagnostics: &webserver_diagnostics_environs - DIAGNOSTICS_HEALTHCHECK_ENABLED: ${DIAGNOSTICS_HEALTHCHECK_ENABLED} - DIAGNOSTICS_MAX_AVG_LATENCY: ${DIAGNOSTICS_MAX_AVG_LATENCY} - DIAGNOSTICS_MAX_TASK_DELAY: ${DIAGNOSTICS_MAX_TASK_DELAY} - DIAGNOSTICS_SLOW_DURATION_SECS: ${DIAGNOSTICS_SLOW_DURATION_SECS} +## 3rd party services x-postgres-settings: &postgres_settings POSTGRES_DB: ${POSTGRES_DB} POSTGRES_HOST: ${POSTGRES_HOST} @@ -29,6 +25,116 @@ x-postgres-settings: &postgres_settings POSTGRES_PORT: ${POSTGRES_PORT} POSTGRES_USER: ${POSTGRES_USER} +x-prometheus-settings: &prometheus_settings + PROMETHEUS_URL: ${RESOURCE_USAGE_TRACKER_PROMETHEUS_URL} + PROMETHEUS_USERNAME: ${RESOURCE_USAGE_TRACKER_PROMETHEUS_USERNAME} + PROMETHEUS_PASSWORD: ${RESOURCE_USAGE_TRACKER_PROMETHEUS_PASSWORD} + + + +x-rabbit-settings: &rabbit_settings + RABBIT_HOST: ${RABBIT_HOST} + RABBIT_PASSWORD: ${RABBIT_PASSWORD} + RABBIT_PORT: ${RABBIT_PORT} + RABBIT_SECURE: ${RABBIT_SECURE} + RABBIT_USER: ${RABBIT_USER} + +x-redis-settings: &redis_settings + REDIS_HOST: ${REDIS_HOST} + REDIS_PORT: ${REDIS_PORT} + REDIS_SECURE: ${REDIS_SECURE} + REDIS_USER: ${REDIS_USER} + REDIS_PASSWORD: ${REDIS_PASSWORD} + +x-registry-settings: ®istry_settings + REGISTRY_AUTH: ${REGISTRY_AUTH} + REGISTRY_PATH: ${REGISTRY_PATH} + REGISTRY_PW: ${REGISTRY_PW} + REGISTRY_SSL: ${REGISTRY_SSL} + REGISTRY_URL: ${REGISTRY_URL} + REGISTRY_USER: ${REGISTRY_USER} + +x-s3-settings: &s3_settings + S3_ACCESS_KEY: ${S3_ACCESS_KEY} + S3_BUCKET_NAME: ${S3_BUCKET_NAME} + S3_ENDPOINT: ${S3_ENDPOINT} + S3_REGION: ${S3_REGION} + S3_SECRET_KEY: ${S3_SECRET_KEY} + +x-ec2-access-settings: &ec2_access_settings + EC2_ACCESS_KEY_ID: ${AUTOSCALING_EC2_ACCESS_KEY_ID} + EC2_SECRET_ACCESS_KEY: ${AUTOSCALING_EC2_SECRET_ACCESS_KEY} + EC2_REGION_NAME: ${AUTOSCALING_EC2_REGION_NAME} + + +x-smtp-settings: &smtp_settings + SMTP_HOST: ${SMTP_HOST} + SMTP_PORT: ${SMTP_PORT} + SMTP_USERNAME: ${SMTP_USERNAME} + SMTP_PASSWORD: ${SMTP_PASSWORD} + SMTP_PROTOCOL: ${SMTP_PROTOCOL} + + + +## simcore stack services +x-common-logging: &common_logging_environs + LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} + +x-webserver-diagnostics: &webserver_diagnostics_environs + DIAGNOSTICS_HEALTHCHECK_ENABLED: ${DIAGNOSTICS_HEALTHCHECK_ENABLED} + DIAGNOSTICS_MAX_AVG_LATENCY: ${DIAGNOSTICS_MAX_AVG_LATENCY} + DIAGNOSTICS_MAX_TASK_DELAY: ${DIAGNOSTICS_MAX_TASK_DELAY} + DIAGNOSTICS_SLOW_DURATION_SECS: ${DIAGNOSTICS_SLOW_DURATION_SECS} + +x-catalog-settings: &catalog_settings + CATALOG_HOST: ${CATALOG_HOST} + CATALOG_PORT: ${CATALOG_PORT} + +x-director-v2-settings: &director_v2_settings + DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} + DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} + +x-storage-settings: &storage_settings + STORAGE_HOST: ${STORAGE_HOST} + STORAGE_PORT: ${STORAGE_PORT} + +x-webserver-settings: &webserver_settings + WEBSERVER_HOST: ${WEBSERVER_HOST} + WEBSERVER_PORT: ${WEBSERVER_PORT} + +x-webserver-session-settings: 
&webserver_session_settings + SESSION_SECRET_KEY: ${WEBSERVER_SESSION_SECRET_KEY} + SESSION_COOKIE_MAX_AGE: ${SESSION_COOKIE_MAX_AGE} + SESSION_COOKIE_SAMESITE: ${SESSION_COOKIE_SAMESITE} + SESSION_COOKIE_SECURE: ${SESSION_COOKIE_SECURE} + SESSION_COOKIE_HTTPONLY: ${SESSION_COOKIE_HTTPONLY} + + +x-resource-usage-tracker-settings: &resource_usage_tracker_settings + RESOURCE_USAGE_TRACKER_HOST: ${RESOURCE_USAGE_TRACKER_HOST} + RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_EXTERNAL_PORT} + + +x-swarm-stack-settings: &swarm_stack_settings + SWARM_STACK_NAME: ${SWARM_STACK_NAME} + + +x-payments-settings: &payments_settings + PAYMENTS_HOST: ${PAYMENTS_HOST} + PAYMENTS_PORT: ${PAYMENTS_PORT} + PAYMENTS_USERNAME: ${PAYMENTS_USERNAME} + PAYMENTS_PASSWORD: ${PAYMENTS_PASSWORD} + +x-invitations-settings: &invitations_settings + INVITATIONS_HOST: ${INVITATIONS_HOST} + INVITATIONS_PORT: ${INVITATIONS_PORT} + INVITATIONS_USERNAME: ${INVITATIONS_USERNAME} + INVITATIONS_PASSWORD: ${INVITATIONS_PASSWORD} + INVITATIONS_SECRET_KEY: ${INVITATIONS_SECRET_KEY} + INVITATIONS_OSPARC_URL: ${INVITATIONS_OSPARC_URL} + + services: api-server: image: ${DOCKER_REGISTRY:-itisfoundation}/api-server:${DOCKER_IMAGE_TAG:-latest} @@ -38,35 +144,17 @@ services: <<: - *tracing_open_telemetry_environs - *postgres_settings + - *rabbit_settings + - *redis_settings + - *catalog_settings + - *director_v2_settings + - *storage_settings API_SERVER_DEV_FEATURES_ENABLED: ${API_SERVER_DEV_FEATURES_ENABLED} - API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - API_SERVER_LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} API_SERVER_LOGLEVEL: ${API_SERVER_LOGLEVEL} API_SERVER_PROFILING: ${API_SERVER_PROFILING} API_SERVER_WORKER_MODE: "false" - CATALOG_HOST: ${CATALOG_HOST} - CATALOG_PORT: ${CATALOG_PORT} - - DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} - DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} - - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - - REDIS_HOST: ${REDIS_HOST} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} - - STORAGE_HOST: ${STORAGE_HOST} - STORAGE_PORT: ${STORAGE_PORT} - WEBSERVER_HOST: ${WB_API_WEBSERVER_HOST} WEBSERVER_PORT: ${WB_API_WEBSERVER_PORT} WEBSERVER_RPC_NAMESPACE: ${WB_API_WEBSERVER_HOST} @@ -114,7 +202,14 @@ services: networks: - autoscaling_subnet environment: - <<: *tracing_open_telemetry_environs + <<: + - *tracing_open_telemetry_environs + - *common_logging_environs + - *rabbit_settings + - *redis_settings + - *registry_settings + - *ec2_access_settings + AUTOSCALING_LOGLEVEL: ${AUTOSCALING_LOGLEVEL} AUTOSCALING_POLL_INTERVAL: ${AUTOSCALING_POLL_INTERVAL} AUTOSCALING_DRAIN_NODES_WITH_LABELS: ${AUTOSCALING_DRAIN_NODES_WITH_LABELS} @@ -155,23 +250,6 @@ services: SSM_SECRET_ACCESS_KEY: ${SSM_SECRET_ACCESS_KEY} SSM_REGION_NAME: ${SSM_REGION_NAME} - LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - REDIS_HOST: ${REDIS_HOST} - REDIS_PASSWORD: ${REDIS_PASSWORD} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - REGISTRY_USER: ${REGISTRY_USER} - REGISTRY_PW: ${REGISTRY_PW} - REGISTRY_URL: ${REGISTRY_URL} - REGISTRY_SSL: ${REGISTRY_SSL} - REGISTRY_AUTH: 
${REGISTRY_AUTH} AUTOSCALING_TRACING: ${AUTOSCALING_TRACING} volumes: - "/var/run/docker.sock:/var/run/docker.sock" @@ -193,8 +271,13 @@ services: hostname: "cat-{{.Node.Hostname}}-{{.Task.Slot}}" environment: <<: - - *postgres_settings - *tracing_open_telemetry_environs + - *postgres_settings + - *rabbit_settings + - *common_logging_environs + - *catalog_settings + - *director_v2_settings + CATALOG_BACKGROUND_TASK_REST_TIME: ${CATALOG_BACKGROUND_TASK_REST_TIME} CATALOG_DEV_FEATURES_ENABLED: ${CATALOG_DEV_FEATURES_ENABLED} CATALOG_LOGLEVEL: ${CATALOG_LOGLEVEL} @@ -205,14 +288,7 @@ services: DIRECTOR_DEFAULT_MAX_NANO_CPUS: ${DIRECTOR_DEFAULT_MAX_NANO_CPUS} DIRECTOR_HOST: ${DIRECTOR_HOST:-director} DIRECTOR_PORT: ${DIRECTOR_PORT:-8080} - LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} CATALOG_TRACING: ${CATALOG_TRACING} networks: - default @@ -224,7 +300,12 @@ services: networks: - default environment: - <<: *tracing_open_telemetry_environs + <<: + - *tracing_open_telemetry_environs + - *rabbit_settings + - *redis_settings + - *common_logging_environs + CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG: ${CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG} CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: ${CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH} CLUSTERS_KEEPER_DASK_NPROCS: ${CLUSTERS_KEEPER_DASK_NPROCS} @@ -245,8 +326,6 @@ services: CLUSTERS_KEEPER_SSM_REGION_NAME: ${CLUSTERS_KEEPER_SSM_REGION_NAME} CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY: ${CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY} CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX: ${CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX} - LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: ${CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES} PRIMARY_EC2_INSTANCES_ALLOWED_TYPES: ${PRIMARY_EC2_INSTANCES_ALLOWED_TYPES} PRIMARY_EC2_INSTANCES_KEY_NAME: ${PRIMARY_EC2_INSTANCES_KEY_NAME} @@ -263,16 +342,6 @@ services: PRIMARY_EC2_INSTANCES_MAX_START_TIME: ${PRIMARY_EC2_INSTANCES_MAX_START_TIME} PRIMARY_EC2_INSTANCES_DOCKER_DEFAULT_ADDRESS_POOL: ${PRIMARY_EC2_INSTANCES_DOCKER_DEFAULT_ADDRESS_POOL} PRIMARY_EC2_INSTANCES_RABBIT: ${PRIMARY_EC2_INSTANCES_RABBIT} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - REDIS_HOST: ${REDIS_HOST} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} SWARM_STACK_NAME: ${SWARM_STACK_NAME} CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES: ${CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES} WORKERS_EC2_INSTANCES_ALLOWED_TYPES: ${WORKERS_EC2_INSTANCES_ALLOWED_TYPES} @@ -295,6 +364,8 @@ services: <<: - *postgres_settings - *tracing_open_telemetry_environs + - *registry_settings + DIRECTOR_DEFAULT_MAX_MEMORY: ${DIRECTOR_DEFAULT_MAX_MEMORY} DIRECTOR_DEFAULT_MAX_NANO_CPUS: ${DIRECTOR_DEFAULT_MAX_NANO_CPUS} DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: ${DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS} @@ -307,13 +378,6 @@ services: DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: ${DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS} DIRECTOR_TRACING: ${DIRECTOR_TRACING} - REGISTRY_AUTH: ${REGISTRY_AUTH} - REGISTRY_PATH: ${REGISTRY_PATH} - REGISTRY_PW: 
${REGISTRY_PW} - REGISTRY_SSL: ${REGISTRY_SSL} - REGISTRY_URL: ${REGISTRY_URL} - REGISTRY_USER: ${REGISTRY_USER} - SIMCORE_SERVICES_NETWORK_NAME: interactive_services_subnet STORAGE_ENDPOINT: ${STORAGE_ENDPOINT} SWARM_STACK_NAME: ${SWARM_STACK_NAME} @@ -337,7 +401,12 @@ services: environment: <<: - *postgres_settings + - *rabbit_settings + - *redis_settings + - *registry_settings + - *s3_settings - *tracing_open_telemetry_environs + AWS_S3_CLI_S3: ${AWS_S3_CLI_S3} CATALOG_HOST: ${CATALOG_HOST} @@ -383,34 +452,11 @@ services: EFS_MOUNTED_PATH: ${EFS_MOUNTED_PATH} EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: ${EFS_PROJECT_SPECIFIC_DATA_DIRECTORY} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - - REDIS_HOST: ${REDIS_HOST} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} - - REGISTRY_AUTH: ${REGISTRY_AUTH} - REGISTRY_PATH: ${REGISTRY_PATH} - REGISTRY_PW: ${REGISTRY_PW} - REGISTRY_SSL: ${REGISTRY_SSL} - REGISTRY_URL: ${REGISTRY_URL} - REGISTRY_USER: ${REGISTRY_USER} DIRECTOR_V2_DOCKER_HUB_REGISTRY: ${DIRECTOR_V2_DOCKER_HUB_REGISTRY} RESOURCE_USAGE_TRACKER_HOST: ${RESOURCE_USAGE_TRACKER_HOST} RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_EXTERNAL_PORT} - S3_ACCESS_KEY: ${S3_ACCESS_KEY} - S3_BUCKET_NAME: ${S3_BUCKET_NAME} - S3_ENDPOINT: ${S3_ENDPOINT} - S3_REGION: ${S3_REGION} - S3_SECRET_KEY: ${S3_SECRET_KEY} STORAGE_HOST: ${STORAGE_HOST} STORAGE_PORT: ${STORAGE_PORT} @@ -447,19 +493,12 @@ services: environment: <<: - *postgres_settings + - *rabbit_settings + - *redis_settings - *tracing_open_telemetry_environs + LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - REDIS_HOST: ${REDIS_HOST} - REDIS_PASSWORD: ${REDIS_PASSWORD} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} SC_USER_ID: ${SC_USER_ID} SC_USER_NAME: ${SC_USER_NAME} EFS_USER_ID: ${EFS_USER_ID} @@ -479,7 +518,8 @@ services: networks: - default environment: - <<: *tracing_open_telemetry_environs + <<: + - *tracing_open_telemetry_environs INVITATIONS_DEFAULT_PRODUCT: ${INVITATIONS_DEFAULT_PRODUCT} INVITATIONS_LOGLEVEL: ${INVITATIONS_LOGLEVEL} INVITATIONS_OSPARC_URL: ${INVITATIONS_OSPARC_URL} @@ -501,6 +541,7 @@ services: <<: - *postgres_settings - *tracing_open_telemetry_environs + - *rabbit_settings LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} @@ -521,11 +562,6 @@ services: PAYMENTS_SWAGGER_API_DOC_ENABLED: ${PAYMENTS_SWAGGER_API_DOC_ENABLED} PAYMENTS_TRACING: ${PAYMENTS_TRACING} PAYMENTS_USERNAME: ${PAYMENTS_USERNAME} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} RESOURCE_USAGE_TRACKER_HOST: ${RESOURCE_USAGE_TRACKER_HOST} RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_EXTERNAL_PORT} SMTP_HOST: ${SMTP_HOST} @@ -544,6 +580,8 @@ services: environment: <<: - *postgres_settings + - *rabbit_settings + - *redis_settings - *tracing_open_telemetry_environs LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} @@ -552,16 +590,6 @@ services: PROMETHEUS_URL: ${RESOURCE_USAGE_TRACKER_PROMETHEUS_URL} PROMETHEUS_USERNAME: 
${RESOURCE_USAGE_TRACKER_PROMETHEUS_USERNAME} PROMETHEUS_PASSWORD: ${RESOURCE_USAGE_TRACKER_PROMETHEUS_PASSWORD} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - REDIS_HOST: ${REDIS_HOST} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} RESOURCE_USAGE_TRACKER_LOGLEVEL: ${RESOURCE_USAGE_TRACKER_LOGLEVEL} RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED: ${RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED} RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC: ${RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC} @@ -581,6 +609,8 @@ services: environment: <<: - *postgres_settings + - *rabbit_settings + - *redis_settings - *tracing_open_telemetry_environs CATALOG_HOST: ${CATALOG_HOST} @@ -605,18 +635,6 @@ services: LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - - REDIS_HOST: ${REDIS_HOST} - REDIS_PASSWORD: ${REDIS_PASSWORD} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - docker-api-proxy: image: ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:${DOCKER_IMAGE_TAG:-latest} @@ -694,8 +712,19 @@ services: environment: &webserver_environment <<: - *postgres_settings + - *rabbit_settings + - *redis_settings - *tracing_open_telemetry_environs - *webserver_diagnostics_environs + - *catalog_settings + - *director_v2_settings + - *storage_settings + - *resource_usage_tracker_settings + - *webserver_session_settings + - *swarm_stack_settings + - *smtp_settings + - *payments_settings + - *invitations_settings AIODEBUG_SLOW_DURATION_SECS: ${AIODEBUG_SLOW_DURATION_SECS} @@ -703,7 +732,6 @@ services: WEBSERVER_DEV_FEATURES_ENABLED: ${WEBSERVER_DEV_FEATURES_ENABLED} WEBSERVER_REALTIME_COLLABORATION: ${WEBSERVER_REALTIME_COLLABORATION} - WEBSERVER_LOGLEVEL: ${WEBSERVER_LOGLEVEL} WEBSERVER_PROFILING: ${WEBSERVER_PROFILING} @@ -793,13 +821,6 @@ services: PAYMENTS_PORT: ${PAYMENTS_PORT} PAYMENTS_USERNAME: ${PAYMENTS_USERNAME} - # WEBSERVER_REDIS - REDIS_HOST: ${REDIS_HOST} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} - # WEBSERVER_REST REST_SWAGGER_API_DOC_ENABLED: ${REST_SWAGGER_API_DOC_ENABLED} @@ -844,13 +865,6 @@ services: PROJECTS_MAX_COPY_SIZE_BYTES: ${PROJECTS_MAX_COPY_SIZE_BYTES} PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES: ${PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES} - # WEBSERVER_RABBITMQ - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - # WEBSERVER_TRASH TRASH_RETENTION_DAYS: ${TRASH_RETENTION_DAYS} @@ -927,6 +941,7 @@ services: init: true hostname: "api-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjonction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931 environment: + <<: *webserver_environment WEBSERVER_FUNCTIONS: ${WEBSERVER_FUNCTIONS} # needed for api-server WEBSERVER_HOST: ${WB_API_WEBSERVER_HOST} @@ -946,6 +961,9 @@ services: environment: <<: - *postgres_settings + - *rabbit_settings + - *redis_settings + WEBSERVER_LOGLEVEL: ${WB_DB_EL_LOGLEVEL} # NOTE: keep in sync with the prefix 
form the hostname @@ -1000,19 +1018,6 @@ services: WEBSERVER_USERS: ${WB_DB_EL_USERS} WEBSERVER_WALLETS: ${WB_DB_EL_WALLETS} - # WEBSERVER_RABBITMQ - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - - # WEBSERVER_REDIS - REDIS_HOST: ${REDIS_HOST} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - REDIS_PASSWORD: ${REDIS_PASSWORD} RESOURCE_MANAGER_RESOURCE_TTL_S: ${RESOURCE_MANAGER_RESOURCE_TTL_S} @@ -1031,6 +1036,8 @@ services: environment: <<: - *postgres_settings + - *rabbit_settings + - *redis_settings - *tracing_open_telemetry_environs # WEBSERVER_DIRECTOR_V2 @@ -1045,20 +1052,6 @@ services: # NOTE: keep in sync with the prefix form the hostname LONG_RUNNING_TASKS_NAMESPACE_SUFFIX: gc - # WEBSERVER_RABBITMQ - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - - # WEBSERVER_REDIS - REDIS_HOST: ${REDIS_HOST} - REDIS_PASSWORD: ${REDIS_PASSWORD} - REDIS_PORT: ${REDIS_PORT} - REDIS_SECURE: ${REDIS_SECURE} - REDIS_USER: ${REDIS_USER} - # WEBSERVER_RESOURCE_MANAGER RESOURCE_MANAGER_RESOURCE_TTL_S: ${WB_GC_RESOURCE_MANAGER_RESOURCE_TTL_S} @@ -1137,7 +1130,6 @@ services: GUNICORN_CMD_ARGS: ${WEBSERVER_GUNICORN_CMD_ARGS} - # WEBSERVER_DIAGNOSTICS WEBSERVER_DIAGNOSTICS: ${WB_AUTH_DIAGNOSTICS} @@ -1208,6 +1200,10 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock environment: + <<: + - *rabbit_settings + - *tracing_open_telemetry_environs + AGENT_LOGLEVEL: ${AGENT_LOGLEVEL} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} @@ -1218,14 +1214,7 @@ services: AGENT_VOLUMES_CLEANUP_S3_BUCKET: ${AGENT_VOLUMES_CLEANUP_S3_BUCKET} AGENT_VOLUMES_CLEANUP_S3_PROVIDER: ${AGENT_VOLUMES_CLEANUP_S3_PROVIDER} AGENT_DOCKER_NODE_ID: "{{.Node.ID}}" - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_USER: ${RABBIT_USER} - RABBIT_SECURE: ${RABBIT_SECURE} - AGENT_TRACING: ${AGENT_TRACING} - <<: *tracing_open_telemetry_environs notifications: image: ${DOCKER_REGISTRY:-itisfoundation}/notifications:${DOCKER_IMAGE_TAG:-latest} @@ -1235,6 +1224,7 @@ services: environment: <<: - *postgres_settings + - *rabbit_settings - *tracing_open_telemetry_environs LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} @@ -1242,12 +1232,6 @@ services: NOTIFICATIONS_LOGLEVEL: ${NOTIFICATIONS_LOGLEVEL} NOTIFICATIONS_TRACING: ${NOTIFICATIONS_TRACING} - RABBIT_HOST: ${RABBIT_HOST} - RABBIT_PASSWORD: ${RABBIT_PASSWORD} - RABBIT_PORT: ${RABBIT_PORT} - RABBIT_SECURE: ${RABBIT_SECURE} - RABBIT_USER: ${RABBIT_USER} - dask-sidecar: image: ${DOCKER_REGISTRY:-itisfoundation}/dask-sidecar:${DOCKER_IMAGE_TAG:-latest} init: true @@ -1263,6 +1247,9 @@ services: - computational_shared_data:${SIDECAR_COMP_SERVICES_SHARED_FOLDER:-/home/scu/computational_shared_data} - /var/run/docker.sock:/var/run/docker.sock environment: &sidecar-environment + <<: + - *rabbit_settings + DASK_TLS_CA_FILE: ${DASK_TLS_CA_FILE} DASK_TLS_KEY: ${DASK_TLS_KEY} DASK_TLS_CERT: ${DASK_TLS_CERT} @@ -1272,11 +1259,6 @@ services: DASK_SIDECAR_LOGLEVEL: ${DASK_SIDECAR_LOGLEVEL} SIDECAR_COMP_SERVICES_SHARED_VOLUME_NAME: ${SWARM_STACK_NAME}_computational_shared_data SIDECAR_COMP_SERVICES_SHARED_FOLDER: 
${SIDECAR_COMP_SERVICES_SHARED_FOLDER:-/home/scu/computational_shared_data}
-      RABBIT_HOST: ${RABBIT_HOST}
-      RABBIT_PASSWORD: ${RABBIT_PASSWORD}
-      RABBIT_PORT: ${RABBIT_PORT}
-      RABBIT_SECURE: ${RABBIT_SECURE}
-      RABBIT_USER: ${RABBIT_USER}
     networks:
       - computational_services_subnet
     secrets: *dask_tls_secrets
@@ -1300,10 +1282,13 @@
     networks:
       - storage_subnet
     environment:
+      <<:
+        - *tracing_open_telemetry_environs
+
       DATCORE_ADAPTER_LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING}
       DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED}
       DATCORE_ADAPTER_TRACING: ${DATCORE_ADAPTER_TRACING}
-      <<: *tracing_open_telemetry_environs
+
 
   storage:
     image: ${DOCKER_REGISTRY:-itisfoundation}/storage:${DOCKER_IMAGE_TAG:-latest}
     init: true
     environment: &storage_environment
       <<:
         - *postgres_settings
+        - *rabbit_settings
+        - *redis_settings
+        - *s3_settings
         - *tracing_open_telemetry_environs
       DATCORE_ADAPTER_HOST: ${DATCORE_ADAPTER_HOST:-datcore-adapter}
       LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED}
       LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING}
-      RABBIT_HOST: ${RABBIT_HOST}
-      RABBIT_PASSWORD: ${RABBIT_PASSWORD}
-      RABBIT_PORT: ${RABBIT_PORT}
-      RABBIT_SECURE: ${RABBIT_SECURE}
-      RABBIT_USER: ${RABBIT_USER}
-      REDIS_HOST: ${REDIS_HOST}
-      REDIS_PORT: ${REDIS_PORT}
-      REDIS_SECURE: ${REDIS_SECURE}
-      REDIS_USER: ${REDIS_USER}
-      REDIS_PASSWORD: ${REDIS_PASSWORD}
-      S3_ACCESS_KEY: ${S3_ACCESS_KEY}
-      S3_BUCKET_NAME: ${S3_BUCKET_NAME}
-      S3_ENDPOINT: ${S3_ENDPOINT}
-      S3_REGION: ${S3_REGION}
-      S3_SECRET_KEY: ${S3_SECRET_KEY}
       STORAGE_WORKER_MODE: "false"
       STORAGE_LOGLEVEL: ${STORAGE_LOGLEVEL}
       STORAGE_MONITORING_ENABLED: 1
diff --git a/services/web/server/src/simcore_service_webserver/projects/utils.py b/services/web/server/src/simcore_service_webserver/projects/utils.py
index 63697ea6f172..892538375d6e 100644
--- a/services/web/server/src/simcore_service_webserver/projects/utils.py
+++ b/services/web/server/src/simcore_service_webserver/projects/utils.py
@@ -116,18 +116,14 @@ def substitute_parameterized_inputs(
     """
     project = deepcopy(parameterized_project)
 
-    # TODO: optimize value normalization
-    def _num(s):
-        try:
-            return int(s)
-        except ValueError:
-            return float(s)
-
-    def _normalize_value(s):
-        try:
-            return _num(s)
-        except ValueError:
-            return s
+    # Normalize values trying int, then float, then str (left-to-right union
+    # mode; pydantic's default "smart" union would leave string input as str)
+    _value_adapter = TypeAdapter(
+        Annotated[int | float | str, Field(union_mode="left_to_right")]
+    )
+
+    def _normalize_value(value: str) -> int | float | str:
+        return _value_adapter.validate_python(value)
 
     def _get_param_input_match(name, value, access) -> Match[str] | None:
         if (
@@ -143,18 +139,16 @@ def _get_param_input_match(name, value, access) -> Match[str] | None:
 
     new_inputs = {}
     for name, value in inputs.items():
-        match = _get_param_input_match(name, value, access)
-        if match:
-            # TODO: use jinja2 to interpolate expressions?
-            value = match.group(1)
-            if value in parameters:
-                new_inputs[name] = _normalize_value(parameters[value])
-            else:
-                _logger.warning(
-                    "Could not resolve parameter %s. No value provided in %s",
-                    value,
-                    parameters,
-                )
+        if (match := _get_param_input_match(name, value, access)) and (
+            param_name := match.group(1)
+        ) in parameters:
+            new_inputs[name] = _normalize_value(parameters[param_name])
+        elif match:
+            _logger.warning(
+                "Could not resolve parameter %s. 
No value provided in %s", + param_name, + parameters, + ) inputs.update(new_inputs) return project @@ -279,7 +271,7 @@ def _check_for_changes(d1: dict[str, Any], d2: dict[str, Any]) -> None: """ Checks if d1's values have changed compared to d2's. NOTE: Does not guarantee that d2's values have changed - compare to d1's. + compared to d1's. """ for k, v in d1.items(): if k not in d2: @@ -359,3 +351,25 @@ def default_copy_project_name(name: str) -> str: def replace_multiple_spaces(text: str) -> str: # Use regular expression to replace multiple spaces with a single space return re.sub(r"\s+", " ", text) + + +def default_copy_project_name(name: str) -> str: + if match := COPY_SUFFIX_RE.fullmatch(name): + new_copy_index = 1 + if current_copy_index := match.group(2): + # we receive something of type "(23)" + new_copy_index = ( + TypeAdapter(int).validate_python(current_copy_index.strip("()")) + 1 + ) + return f"{match.group(1)}({new_copy_index})" + return f"{name} (Copy)" + + +def replace_multiple_spaces(text: str) -> str: + # Use regular expression to replace multiple spaces with a single space + return re.sub(r"\s+", " ", text) + + +def replace_multiple_spaces(text: str) -> str: + # Use regular expression to replace multiple spaces with a single space + return re.sub(r"\s+", " ", text) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_exceptions.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_exceptions.py index 094a7cebac99..d953510bf75b 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_exceptions.py @@ -5,7 +5,7 @@ from common_library.error_codes import create_error_code from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from common_library.user_messages import user_message -from models_library.function_services_catalog._utils import ServiceNotFound +from models_library.function_services_catalog._utils import ServiceNotFoundError from servicelib.aiohttp import status from servicelib.aiohttp.typing_extension import Handler @@ -121,7 +121,7 @@ async def _wrapper(request: web.Request) -> web.StreamResponse: ) from err except ( - ServiceNotFound, + ServiceNotFoundError, FileToLargeError, IncompatibleServiceError, GuestUsersLimitError,