From 75c28f205acaeff1c9792bee3f1f1935a60fc5b2 Mon Sep 17 00:00:00 2001 From: WolfgangFischerEtas Date: Fri, 6 Feb 2026 09:56:24 +0100 Subject: [PATCH 1/4] Feature development: team with team-sync, external groups, team-permissions and environment variables and secrets (#1) * Feature development: team with team-sync, external groups, team-permissions and environment variables and secrets * chore: update ruff and mypy (#582) * chore: update ruff * chore: ruff auto fix * chore: ruff manual fixes * chore: update mypy and fix code * chore: fix ruff autofixes by updating odmantic See https://github.com/art049/odmantic/pull/501 * chore: ruff format * chore: fix non formatting block * chore: remove fixme * Merge remote-tracking branch 'upstream/main' into feature/teampermission * fix: status added in _run_page_query as it is now used * chore: fixed by ruff * feat: add team sync variables to team --------- Co-authored-by: Alexander Lanin --- .pre-commit-config.yaml | 4 +- DEPENDENCIES | 3 +- .../organization/repository/branch-or-tag.md | 2 +- .../repository/environment/index.md | 46 ++++ .../repository/environment/secret.md | 64 +++++ .../repository/environment/variable.md | 42 ++++ .../organization/repository/index.md | 9 +- .../repository/team-permission.md | 38 +++ docs/reference/organization/team.md | 2 + examples/template/otterdog-defaults.libsonnet | 37 ++- mkdocs.yml | 6 +- otterdog/jsonnet.py | 34 +++ otterdog/logging.py | 2 +- otterdog/models/__init__.py | 90 +++++-- otterdog/models/branch_protection_rule.py | 7 +- otterdog/models/custom_property.py | 16 +- otterdog/models/env_secret.py | 73 ++++++ otterdog/models/env_variable.py | 72 ++++++ otterdog/models/environment.py | 180 +++++++++++++- otterdog/models/github_organization.py | 100 ++++++-- otterdog/models/organization_ruleset.py | 9 +- otterdog/models/organization_secret.py | 9 +- otterdog/models/organization_settings.py | 15 +- otterdog/models/organization_variable.py | 9 +- 
.../models/organization_workflow_settings.py | 4 +- otterdog/models/repo_workflow_settings.py | 19 +- otterdog/models/repository.py | 116 ++++++--- otterdog/models/role.py | 2 +- otterdog/models/ruleset.py | 30 +-- otterdog/models/secret.py | 19 +- otterdog/models/team.py | 31 ++- otterdog/models/team_permission.py | 123 ++++++++++ otterdog/models/variable.py | 4 +- otterdog/models/webhook.py | 19 +- otterdog/models/workflow_settings.py | 2 +- otterdog/operations/__init__.py | 8 +- otterdog/operations/apply.py | 8 +- otterdog/operations/check_status.py | 3 + otterdog/operations/delete_file.py | 4 +- otterdog/operations/diff_operation.py | 11 +- otterdog/operations/fetch_config.py | 2 +- otterdog/operations/open_pull_request.py | 3 +- otterdog/operations/plan.py | 9 +- otterdog/operations/push_config.py | 4 +- otterdog/providers/github/__init__.py | 53 +++++ otterdog/providers/github/exception.py | 2 +- otterdog/providers/github/graphql.py | 43 +++- otterdog/providers/github/rest/__init__.py | 6 + otterdog/providers/github/rest/env_client.py | 169 +++++++++++++ otterdog/providers/github/rest/org_client.py | 14 +- .../providers/github/rest/reference_client.py | 4 +- otterdog/providers/github/rest/repo_client.py | 41 ++++ otterdog/providers/github/rest/team_client.py | 92 ++++++++ .../get-repository-permissions-of-team.gql | 23 ++ .../get-team-permissions-repositories.gql | 28 +++ otterdog/resources/schemas/env-secret.json | 8 + otterdog/resources/schemas/env-variable.json | 8 + otterdog/resources/schemas/environment.json | 8 + otterdog/resources/schemas/repository.json | 4 + .../resources/schemas/team-permission.json | 11 + otterdog/resources/schemas/team.json | 21 +- otterdog/utils.py | 4 +- otterdog/webapp/blueprints/__init__.py | 4 +- otterdog/webapp/db/models.py | 60 ++--- otterdog/webapp/policies/__init__.py | 4 +- otterdog/webapp/tasks/__init__.py | 3 +- otterdog/webapp/tasks/blueprints/__init__.py | 4 +- .../webapp/tasks/blueprints/pin_workflow.py | 2 +- 
.../webapp/tasks/validate_pull_request.py | 3 +- otterdog/webapp/utils.py | 2 +- otterdog/webapp/webhook/github_models.py | 4 +- poetry.lock | 223 ++++++++++++------ pyproject.toml | 6 +- tests/models/resources/github-env-secret.json | 4 + .../resources/github-team-permission.json | 4 + .../models/resources/otterdog-env-secret.json | 5 + .../resources/otterdog-team-permission.json | 4 + .../test-org/vendor/github-env-secret.json | 4 + tests/models/test_env_secret.py | 67 ++++++ tests/models/test_env_variable.py | 33 +++ tests/models/test_repository.py | 12 +- tests/models/test_ruleset.py | 24 +- tests/models/test_team_permission.py | 68 ++++++ tests/operations/test_list_advisories.py | 2 +- tests/operations/test_operation.py | 4 +- .../github/integration/helpers/http_mock.py | 2 +- .../github/integration/helpers/model.py | 16 +- 87 files changed, 2067 insertions(+), 326 deletions(-) create mode 100644 docs/reference/organization/repository/environment/index.md create mode 100644 docs/reference/organization/repository/environment/secret.md create mode 100644 docs/reference/organization/repository/environment/variable.md create mode 100644 docs/reference/organization/repository/team-permission.md create mode 100644 otterdog/models/env_secret.py create mode 100644 otterdog/models/env_variable.py create mode 100644 otterdog/models/team_permission.py create mode 100644 otterdog/providers/github/rest/env_client.py create mode 100644 otterdog/resources/graphql/get-repository-permissions-of-team.gql create mode 100644 otterdog/resources/graphql/get-team-permissions-repositories.gql create mode 100644 otterdog/resources/schemas/env-secret.json create mode 100644 otterdog/resources/schemas/env-variable.json create mode 100644 otterdog/resources/schemas/team-permission.json create mode 100644 tests/models/resources/github-env-secret.json create mode 100644 tests/models/resources/github-team-permission.json create mode 100644 tests/models/resources/otterdog-env-secret.json 
create mode 100644 tests/models/resources/otterdog-team-permission.json create mode 100644 tests/models/resources/test-org/vendor/github-env-secret.json create mode 100644 tests/models/test_env_secret.py create mode 100644 tests/models/test_env_variable.py create mode 100644 tests/models/test_team_permission.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2c4fe46f..db7abcdc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,13 +11,13 @@ repos: - id: mixed-line-ending - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.1 + rev: v0.15.0 hooks: - id: ruff args: [ --fix ] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.13.0 + rev: v1.19.1 hooks: - id: mypy additional_dependencies: [types-requests, types-aiofiles, types-redis, types-PyYAML, types-python-slugify] diff --git a/DEPENDENCIES b/DEPENDENCIES index 6c819aa1..8dcd7caa 100644 --- a/DEPENDENCIES +++ b/DEPENDENCIES @@ -2,6 +2,7 @@ pypi/pypi/-/aiofiles/24.1.0 pypi/pypi/-/aiohappyeyeballs/2.6.1 pypi/pypi/-/aiohttp/3.13.3 pypi/pypi/-/aiohttp-client-cache/0.14.2 +pypi/pypi/-/aiohttp-client-cache/0.14.2 pypi/pypi/-/aiohttp-retry/2.9.1 pypi/pypi/-/aioshutil/1.6 pypi/pypi/-/aiosignal/1.4.0 @@ -82,6 +83,6 @@ pypi/pypi/-/typing-extensions/4.15.0 pypi/pypi/-/typing-inspection/0.4.2 pypi/pypi/-/url-normalize/2.2.1 pypi/pypi/-/urllib3/2.6.3 -pypi/pypi/-/werkzeug/3.1.4 +pypi/pypi/-/werkzeug/3.1.5 pypi/pypi/-/wsproto/1.3.1 pypi/pypi/-/yarl/1.22.0 diff --git a/docs/reference/organization/repository/branch-or-tag.md b/docs/reference/organization/repository/branch-or-tag.md index 0fda1415..e21f0169 100644 --- a/docs/reference/organization/repository/branch-or-tag.md +++ b/docs/reference/organization/repository/branch-or-tag.md @@ -1,4 +1,4 @@ -A BranchOrTag represents either a branch or tag pattern to use within an [Environment](environment.md). 
+A BranchOrTag represents either a branch or tag pattern to use within an [Environment](environment/index.md). The following format is used to distinguish between tags and branches: | Type | Format | Example | diff --git a/docs/reference/organization/repository/environment/index.md b/docs/reference/organization/repository/environment/index.md new file mode 100644 index 00000000..cd2b53a1 --- /dev/null +++ b/docs/reference/organization/repository/environment/index.md @@ -0,0 +1,46 @@ +Definition of an `Environment` on repository level, the following properties are supported: + +| Key | Value | Description | Notes | +|----------------------------|--------------------------------------------|---------------------------------------------------------------------------------------|--------------------------------------------------------------------| +| _name_ | string | The name of the environment | | +| _wait_timer_ | int | The amount of time to wait before allowing deployments to proceed | | +| _reviewers_ | list\[[Actor](../actor.md)\] | Users or Teams that may approve workflow runs that access this environment | | +| _deployment_branch_policy_ | string | Limit which branches can deploy to this environment based on rules or naming patterns | `all`, `protected` or `selected` | +| _branch_policies_ | list\[[BranchOrTag](../branch-or-tag.md)\] | List of branch or tag patterns which can deploy to this environment | only applicable if `deployment_branch_policy` is set to `selected` | + +## Jsonnet Function + +``` jsonnet +orgs.newEnvironment('') { + : +} +``` + +## Validation rules + +- specifying a non-empty list of `branch_policies` while `deployment_branch_policy` is not set to `selected` triggers a warning + +## Example usage + +=== "jsonnet" + ``` jsonnet + orgs.newOrg('OtterdogTest') { + ... + _repositories+:: [ + ... + orgs.newRepo('test-repo') { + ... 
+ environments: [ + orgs.newEnvironment('linux') { + deployment_branch_policy: "protected", + reviewers+: [ + "@OtterdogTest/eclipsefdn-security", + "@netomi" + ], + wait_timer: 30, + }, + ] + } + ] + } + ``` diff --git a/docs/reference/organization/repository/environment/secret.md b/docs/reference/organization/repository/environment/secret.md new file mode 100644 index 00000000..8d2ce270 --- /dev/null +++ b/docs/reference/organization/repository/environment/secret.md @@ -0,0 +1,64 @@ +Definition of a `Secret` on repository environment level, the following properties are supported: + +| Key | Value | Description | Note | +|-------------------------|----------------|------------------------------------------------|------| +| _name_ | string | The name of the secret | | +| _value_ | string | The secret value | | + +The secret value can be resolved via a credential provider. The supported format is `:`. + +- Bitwarden: `bitwarden:@` + + ``` json + "secret": "bitwarden:118276ad-158c-4720-b68d-af8c00fe3481@secret" + ``` + +- Pass: `pass:` + + ``` json + "secret": "pass:path/to/repo/secret" + ``` + +!!! note + + After executing an `import` operation, the secret will be set to `********` as GitHub will not disclose the + secret value anymore via its API. You will need to update the configuration with the real secret value, either + by entering the secret value (not advised), or referencing it via a credential provider. + + Secrets which have a redacted value defined will be skipped during processing. + +## Jsonnet Function + +``` jsonnet +orgs.newEnvSecret('') { + : +} +``` + +## Validation rules + +- redacted secret values (`********`) trigger a validation info and will skip the secret during processing + +## Example usage + +=== "jsonnet" + ``` jsonnet + orgs.newOrg('OtterdogTest') { + ... + _repositories+:: [ + ... + orgs.newRepo('test-repo') { + ... 
+ environments: [ + orgs.newEnvironment('Environment') { + secrets: [ + orgs.newEnvSecret('TEST_SECRET') { + value: "pass:path/to/secret", + }, + ], + }, + ], + } + ] + } + ``` diff --git a/docs/reference/organization/repository/environment/variable.md b/docs/reference/organization/repository/environment/variable.md new file mode 100644 index 00000000..0432573b --- /dev/null +++ b/docs/reference/organization/repository/environment/variable.md @@ -0,0 +1,42 @@ +Definition of a `Variable` on repository environment level, the following properties are supported: + +| Key | Value | Description | Note | +|-------------------------|----------------|--------------------------|------| +| _name_ | string | The name of the variable | | +| _value_ | string | The variable value | | + +## Jsonnet Function + +``` jsonnet +orgs.newEnvVariable('') { + : +} +``` + +## Validation rules + +- None + +## Example usage + +=== "jsonnet" + ``` jsonnet + orgs.newOrg('OtterdogTest') { + ... + _repositories+:: [ + ... + orgs.newRepo('test-repo') { + ... 
+ environments: [ + orgs.newEnvironment('Environment') { + variables: [ + orgs.newEnvVariable('TEST_VARIABLE') { + value: "TESTVALUE", + }, + ], + }, + ], + } + ] + } + ``` diff --git a/docs/reference/organization/repository/index.md b/docs/reference/organization/repository/index.md index b15364c1..bf3a9143 100644 --- a/docs/reference/organization/repository/index.md +++ b/docs/reference/organization/repository/index.md @@ -51,8 +51,9 @@ Definition of a Repository for a GitHub organization, the following properties a | _webhooks_ | list\[[Webhook](webhook.md)\] | webhooks defined for this repo, see section above for details | | | _secrets_ | list\[[RepositorySecret](secret.md)\] | secrets defined for this repo, see section below for details | | | _variables_ | list\[[RepositoryVariable](variable.md)\] | variables defined for this repo, see section below for details | | -| _environments_ | list\[[Environment](environment.md)\] | environments defined for this repo, see section below for details | | +| _environments_ | list\[[Environment](environment/index.md)\] | environments defined for this repo, see section below for details | | | _branch_protection_rules_ | list\[[BranchProtectionRule](branch-protection-rule.md)\] | branch protection rules of the repo, see section below for details | | +| _team_permissions_ | list\[[TeamPermission](team-permission.md)\] | team permissions defined for this repo, see section below for details | allowed are the following: `pull`, `triage`, `push`, `maintain`, `admin` or `READ`, `WRITE`, `MAINTAIN`, `TRIAGE`, `ADMIN` (The latter values come from github graphql) | ## Embedded Models @@ -137,6 +138,12 @@ Definition of a Repository for a GitHub organization, the following properties a branch_protection_rules: [ orgs.newBranchProtectionRule('main'), ], + team_permissions: [ + orgs.newTeamPermission('team') { + permission: "maintain", + }, + ], }, + } ``` diff --git a/docs/reference/organization/repository/team-permission.md 
b/docs/reference/organization/repository/team-permission.md new file mode 100644 index 00000000..0cf7e846 --- /dev/null +++ b/docs/reference/organization/repository/team-permission.md @@ -0,0 +1,38 @@ +Definition of a `Team Permission`, the following properties are supported: + +| Key | Value | Description | Notes | +|----------------------------|-----------------------------------------|---------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| _name_ | string | The name of the team in the organization. | | +| _permission_ | string | The name of the permission. | allowed are the following: `pull`, `triage`, `push`, `maintain`, `admin` or `READ`, `WRITE`, `MAINTAIN`, `TRIAGE`, `ADMIN` (The latter values come from github graphql) | + +## Jsonnet Function + +``` jsonnet +orgs.newTeamPermission('') { + : +} +``` + +## Validation rules + +- allowed values are the following `pull`, `triage`, `push`, `maintain`, `admin` or `READ`, `WRITE`, `MAINTAIN`, `TRIAGE`, `ADMIN`. + +## Example usage + +=== "jsonnet" + ``` jsonnet + orgs.newOrg('OtterdogTest') { + ... + _repositories+:: [ + ... + orgs.newRepo('test-repo') { + ... 
+ team_permissions: [ + orgs.newTeamPermission('team') { + permission: "maintain", + }, + ], + } + ] + } + ``` diff --git a/docs/reference/organization/team.md b/docs/reference/organization/team.md index 45cc7338..71b1cbce 100644 --- a/docs/reference/organization/team.md +++ b/docs/reference/organization/team.md @@ -9,6 +9,8 @@ Definition of an organization `Team`, the following properties are supported: | _members_ | list[string] | List of users that should be a member of the team | | | _skip_members_ | boolean | If `true`, team members will be ignored | | | _skip_non_organization_members_ | boolean | If `true`, users which are not yet organization members can not be added to the team | | +| _team_sync_ | string | The id of an IdP group which is connected to a team on Github Enterprise Cloud | | +| _external_groups_ | string | The id of an external group which is provisioned on the enterprise | | ## Jsonnet Function diff --git a/examples/template/otterdog-defaults.libsonnet b/examples/template/otterdog-defaults.libsonnet index 4c2ec91e..b6ddd341 100644 --- a/examples/template/otterdog-defaults.libsonnet +++ b/examples/template/otterdog-defaults.libsonnet @@ -104,7 +104,10 @@ local newRepo(name) = { branch_protection_rules: [], # rulesets - rulesets: [] + rulesets: [], + + # team permissions + team_permissions: [] }; # Function to extend an existing repo with the same name. @@ -218,6 +221,12 @@ local newOrgWebhook(url) = { # Function to create a new repository webhook with default settings. local newRepoWebhook(url) = newOrgWebhook(url); +# Function to create a new environment secret with default settings. +local newEnvSecret(name) = { + name: name, + value: null +}; + # Function to create a new repository secret with default settings. local newRepoSecret(name) = { name: name, @@ -230,6 +239,12 @@ local newOrgSecret(name) = newRepoSecret(name) { selected_repositories: [], }; +# Function to create a new environment variable with default settings. 
+local newEnvVariable(name) = { + name: name, + value: null +}; + # Function to create a new repository variable with default settings. local newRepoVariable(name) = { name: name, @@ -256,9 +271,19 @@ local newTeam(name) = { description: "", privacy: "visible", notifications: true, - members: [], + #members: [], skip_members: false, skip_non_organization_members: false, + team_sync_id: null, + team_sync_name: null, + team_sync_description: null, + external_groups: null, +}; + +# Function to create a new term permission with default settings. +local newTeamPermission(name) = { + name: name, + permission: "pull", }; # Function to create a new environment with default settings. @@ -269,6 +294,10 @@ local newEnvironment(name) = { # Can be one of: all, protected_branches, branch_policies deployment_branch_policy: "all", branch_policies: [], + # environment secrets + secrets: [], + # environment variables + variables: [], }; # Function to create a new custom property with default settings. @@ -429,10 +458,14 @@ local newOrg(name, id=name) = { newRepoWebhook:: newRepoWebhook, newRepoSecret:: newRepoSecret, newRepoVariable:: newRepoVariable, + newEnvSecret:: newEnvSecret, + newEnvVariable:: newEnvVariable, newBranchProtectionRule:: newBranchProtectionRule, newRepoRuleset:: newRepoRuleset, newEnvironment:: newEnvironment, newPullRequest:: newPullRequest, newStatusChecks:: newStatusChecks, + newTeam:: newTeam, + newTeamPermission:: newTeamPermission, newMergeQueue:: newMergeQueue, } diff --git a/mkdocs.yml b/mkdocs.yml index a6228613..2ebd42c1 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -58,9 +58,12 @@ nav: - Repository Webhook: reference/organization/repository/webhook.md - Repository Secret: reference/organization/repository/secret.md - Repository Variable: reference/organization/repository/variable.md - - Environment: reference/organization/repository/environment.md - Branch Protection Rule: reference/organization/repository/branch-protection-rule.md - Repository Ruleset: 
reference/organization/repository/ruleset.md + - Environment: + - reference/organization/repository/environment/index.md + - Environment Secret: reference/organization/repository/environment/secret.md + - Environment Variable: reference/organization/repository/environment/variable.md - Referenced Types: - Actor: reference/organization/repository/actor.md - Branch or Tag: reference/organization/repository/branch-or-tag.md @@ -68,6 +71,7 @@ nav: - Repo Name Matcher: reference/organization/repo-name-matcher.md - Ref Matcher: reference/organization/repository/ref-matcher.md - Status Check: reference/organization/repository/status-check.md + - Team Permission: reference/organization/repository/team-permission.md - Policies: - reference/policies/index.md - Upload SBOM data to dependency track: reference/policies/dependency-track-upload.md diff --git a/otterdog/jsonnet.py b/otterdog/jsonnet.py index 793942c7..c118eea0 100644 --- a/otterdog/jsonnet.py +++ b/otterdog/jsonnet.py @@ -29,6 +29,7 @@ class JsonnetConfig: create_org = "newOrg" create_org_role = "newOrgRole" create_org_team = "newTeam" + create_org_team_permission = "newTeamPermission" create_org_custom_property = "newCustomProperty" create_org_webhook = "newOrgWebhook" create_org_secret = "newOrgSecret" @@ -45,6 +46,9 @@ class JsonnetConfig: create_pull_request = "newPullRequest" create_status_checks = "newStatusChecks" create_merge_queue = "newMergeQueue" + create_team_permission = "newTeamPermission" + create_env_secret = "newEnvSecret" + create_env_variable = "newEnvVariable" def __init__( self, @@ -229,6 +233,26 @@ def default_repo_variable_config(self): _logger.debug("no default repo variable config found, variables will be skipped") return None + @cached_property + def default_env_secret_config(self): + try: + # load the default repo env secret config + env_secret_snippet = f"(import '{self.template_file}').{self.create_env_secret}('default')" + return jsonnet_evaluate_snippet(env_secret_snippet) + except 
RuntimeError: + _logger.debug("no default repo env secret config found, secrets will be skipped") + return None + + @cached_property + def default_env_variable_config(self): + try: + # load the default repo env variable config + env_variable_snippet = f"(import '{self.template_file}').{self.create_env_variable}('default')" + return jsonnet_evaluate_snippet(env_variable_snippet) + except RuntimeError: + _logger.debug("no default repo env variable config found, variables will be skipped") + return None + @cached_property def default_branch_protection_rule_config(self): try: @@ -261,6 +285,16 @@ def default_environment_config(self): _logger.debug("no default environment config found, environments will be skipped") return None + @cached_property + def default_team_permission_config(self): + try: + # load the default team permission config + teampermission_snippet = f"(import '{self.template_file}').{self.create_team_permission}('default')" + return jsonnet_evaluate_snippet(teampermission_snippet) + except RuntimeError: + _logger.debug("no default team permission config found, team permissions will be skipped") + return None + @cached_property def default_pull_request_config(self): try: diff --git a/otterdog/logging.py b/otterdog/logging.py index a44965cd..b1e7c1bd 100644 --- a/otterdog/logging.py +++ b/otterdog/logging.py @@ -96,7 +96,7 @@ def init_logging(verbose: int, setup_python_logger: bool = True) -> None: def get_logger(name: str) -> CustomLogger: - return cast(CustomLogger, logging.getLogger(name)) + return cast("CustomLogger", logging.getLogger(name)) def is_info_enabled() -> bool: diff --git a/otterdog/models/__init__.py b/otterdog/models/__init__.py index 3e76a0b2..9bcf8d5d 100644 --- a/otterdog/models/__init__.py +++ b/otterdog/models/__init__.py @@ -90,17 +90,30 @@ class LivePatch(Generic[MT]): current_object: MT | None changes: dict[str, Change] | None parent_object: ModelObject | None + grandparent_object: ModelObject | None forced_update: bool fn: 
LivePatchApplyFn changes_object_to_readonly: bool = False @classmethod - def of_addition(cls, expected_object: MT, parent_object: ModelObject | None, fn: LivePatchApplyFn[MT]) -> LivePatch: - return LivePatch(LivePatchType.ADD, expected_object, None, None, parent_object, False, fn) + def of_addition( + cls, + expected_object: MT, + parent_object: ModelObject | None, + grandparent_object: ModelObject | None, + fn: LivePatchApplyFn[MT], + ) -> LivePatch: + return LivePatch(LivePatchType.ADD, expected_object, None, None, parent_object, grandparent_object, False, fn) @classmethod - def of_deletion(cls, current_object: MT, parent_object: ModelObject | None, fn: LivePatchApplyFn[MT]) -> LivePatch: - return LivePatch(LivePatchType.REMOVE, None, current_object, None, parent_object, False, fn) + def of_deletion( + cls, + current_object: MT, + parent_object: ModelObject | None, + grandparent_object: ModelObject | None, + fn: LivePatchApplyFn[MT], + ) -> LivePatch: + return LivePatch(LivePatchType.REMOVE, None, current_object, None, parent_object, grandparent_object, False, fn) @classmethod def of_changes( @@ -109,6 +122,7 @@ def of_changes( current_object: MT, changes: dict[str, Change], parent_object: ModelObject | None, + grandparent_object: ModelObject | None, forced_update: bool, fn: LivePatchApplyFn[MT], changes_object_to_readonly: bool = False, @@ -119,6 +133,7 @@ def of_changes( current_object, changes, parent_object, + grandparent_object, forced_update, fn, changes_object_to_readonly, @@ -184,7 +199,7 @@ class EmbeddedModelObject(ABC): """ @abstractmethod - def validate(self, context: ValidationContext, parent_object: Any) -> None: ... + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: ... 
def get_difference_from(self, other: Self) -> Change | None: if not isinstance(other, self.__class__): @@ -370,7 +385,7 @@ def get_all_key_values(self) -> list[Any]: return [self.get_key_value()] @abstractmethod - def validate(self, context: ValidationContext, parent_object: Any) -> None: ... + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: ... # noinspection PyMethodMayBeStatic def execute_custom_validation_if_present(self, context: ValidationContext, filename: str) -> None: @@ -508,7 +523,9 @@ def is_embedded_model(field: dataclasses.Field) -> bool: def get_model_objects(self) -> Iterator[tuple[ModelObject, ModelObject]]: yield from [] - def get_model_header(self, parent_object: ModelObject | None = None) -> str: + def get_model_header( + self, parent_object: ModelObject | None = None, grandparent_object: ModelObject | None = None + ) -> str: header = f"[bold]{self.model_object_name}[/]" if self.is_keyed(): @@ -521,13 +538,27 @@ def get_model_header(self, parent_object: ModelObject | None = None) -> str: + f", {parent_object.model_object_name}=" + f"[bold]{escape(parent_object.get_key_value())}[/]" ) + if isinstance(grandparent_object, ModelObject) and grandparent_object.is_keyed(): + header = ( + header + + f", {grandparent_object.model_object_name}=" + + f"[bold]{escape(grandparent_object.get_key_value())}[/]" + ) header = header + "]" - elif isinstance(parent_object, ModelObject) and parent_object.is_keyed(): - header = header + "\\[" - header = ( - header + f"{parent_object.model_object_name}=" + f"[bold]{escape(parent_object.get_key_value())}[/]" - ) + else: + if isinstance(parent_object, ModelObject) and parent_object.is_keyed(): + header = header + "\\[" + header = ( + header + f"{parent_object.model_object_name}=" + f"[bold]{escape(parent_object.get_key_value())}[/]" + ) + if isinstance(grandparent_object, ModelObject) and grandparent_object.is_keyed(): + header = header + "\\[" + header = ( + header + 
+ f"{grandparent_object.model_object_name}=" + + f"[bold]{escape(grandparent_object.get_key_value())}[/]" + ) header = header + "]" return header @@ -592,7 +623,9 @@ def include_for_live_patch(self, context: LivePatchContext) -> bool: """ return True - def include_existing_object_for_live_patch(self, org_id: str, parent_object: ModelObject | None) -> bool: + def include_existing_object_for_live_patch( + self, org_id: str, parent_object: ModelObject | None, grandparent_object: ModelObject | None + ) -> bool: """ Indicates if this live ModelObject should be considered when generating a live patch. @@ -651,9 +684,9 @@ def to_model_dict( if exclude_none_values and not is_set_and_valid(value): continue elif self.is_nested_model_key(key): - result[key] = cast(ModelObject, value).to_model_dict(for_diff, include_nested_models) + result[key] = cast("ModelObject", value).to_model_dict(for_diff, include_nested_models) elif self.is_embedded_model_key(key) and is_set_and_valid(value): - result[key] = cast(EmbeddedModelObject, value).to_model_dict() + result[key] = cast("EmbeddedModelObject", value).to_model_dict() else: result[key] = value @@ -709,17 +742,26 @@ def generate_live_patch( expected_object: MT | None, current_object: MT | None, parent_object: ModelObject | None, + grandparent_object: ModelObject | None, context: LivePatchContext, handler: LivePatchHandler, ) -> None: if current_object is None: expected_object = unwrap(expected_object) - handler(LivePatch.of_addition(expected_object, parent_object, expected_object.apply_live_patch)) + handler( + LivePatch.of_addition( + expected_object, parent_object, grandparent_object, expected_object.apply_live_patch + ) + ) return if expected_object is None: current_object = unwrap(current_object) - handler(LivePatch.of_deletion(current_object, parent_object, current_object.apply_live_patch)) + handler( + LivePatch.of_deletion( + current_object, parent_object, grandparent_object, current_object.apply_live_patch + ) + ) return 
modified_rule: dict[str, Change[Any]] = expected_object.get_difference_from(current_object) @@ -731,6 +773,7 @@ def generate_live_patch( current_object, modified_rule, parent_object, + grandparent_object, False, expected_object.apply_live_patch, ) @@ -742,6 +785,7 @@ def generate_live_patch_of_list( expected_objects: Sequence[MT], current_objects: Sequence[MT], parent_object: MT | None, + grandparent_object: MT | None, context: LivePatchContext, handler: LivePatchHandler, ) -> None: @@ -762,12 +806,16 @@ def generate_live_patch_of_list( break if expected_object is None: - if current_object.include_existing_object_for_live_patch(context.org_id, parent_object): - cls.generate_live_patch(None, current_object, parent_object, context, handler) + if current_object.include_existing_object_for_live_patch( + context.org_id, parent_object, grandparent_object + ): + cls.generate_live_patch(None, current_object, parent_object, grandparent_object, context, handler) continue if expected_object.include_for_live_patch(context): - cls.generate_live_patch(expected_object, current_object, parent_object, context, handler) + cls.generate_live_patch( + expected_object, current_object, parent_object, grandparent_object, context, handler + ) for k in expected_object.get_all_key_values(): expected_objects_by_all_keys.pop(k) @@ -775,7 +823,7 @@ def generate_live_patch_of_list( for _, expected_object in expected_objects_by_key.items(): if expected_object.include_for_live_patch(context): - cls.generate_live_patch(expected_object, None, parent_object, context, handler) + cls.generate_live_patch(expected_object, None, parent_object, grandparent_object, context, handler) @classmethod @abstractmethod diff --git a/otterdog/models/branch_protection_rule.py b/otterdog/models/branch_protection_rule.py index ec5a6e60..769ce442 100644 --- a/otterdog/models/branch_protection_rule.py +++ b/otterdog/models/branch_protection_rule.py @@ -83,7 +83,7 @@ class BranchProtectionRule(ModelObject): def 
model_object_name(self) -> str: return "branch_protection_rule" - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: # when requires_approving_reviews is false, issue a warning if dependent settings # are still set to non default values. @@ -185,9 +185,10 @@ def validate(self, context: ValidationContext, parent_object: Any) -> None: ) if self.requires_deployments is True and len(self.required_deployment_environments) > 0: - from .repository import Repository + if TYPE_CHECKING: + from .repository import Repository - environments = cast(Repository, parent_object).environments + environments = cast("Repository", parent_object).environments environments_by_name = associate_by_key(environments, lambda x: x.name) for env_name in self.required_deployment_environments: diff --git a/otterdog/models/custom_property.py b/otterdog/models/custom_property.py index b4b4ae75..97e45d9b 100644 --- a/otterdog/models/custom_property.py +++ b/otterdog/models/custom_property.py @@ -46,7 +46,7 @@ class CustomProperty(ModelObject): def model_object_name(self) -> str: return "custom_property" - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: if is_set_and_valid(self.value_type): if self.value_type not in {"string", "single_select", "multi_select", "true_false"}: context.add_failure( @@ -171,16 +171,25 @@ def generate_live_patch( expected_object: CustomProperty | None, current_object: CustomProperty | None, parent_object: ModelObject | None, + grandparent_object: ModelObject | None, context: LivePatchContext, handler: LivePatchHandler, ) -> None: if current_object is None: expected_object = unwrap(expected_object) - handler(LivePatch.of_addition(expected_object, parent_object, expected_object.apply_live_patch)) + handler( + 
LivePatch.of_addition( + expected_object, parent_object, grandparent_object, expected_object.apply_live_patch + ) + ) return if expected_object is None: - handler(LivePatch.of_deletion(current_object, parent_object, current_object.apply_live_patch)) + handler( + LivePatch.of_deletion( + current_object, parent_object, grandparent_object, current_object.apply_live_patch + ) + ) return modified_property: dict[str, Change[Any]] = expected_object.get_difference_from(current_object) @@ -198,6 +207,7 @@ def generate_live_patch( current_object, modified_property, parent_object, + grandparent_object, False, expected_object.apply_live_patch, ) diff --git a/otterdog/models/env_secret.py b/otterdog/models/env_secret.py new file mode 100644 index 00000000..7b0a912e --- /dev/null +++ b/otterdog/models/env_secret.py @@ -0,0 +1,73 @@ +# ******************************************************************************* +# Copyright (c) 2023-2024 Eclipse Foundation and others. +# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from __future__ import annotations + +import dataclasses +from typing import TYPE_CHECKING + +from otterdog.models import LivePatch, LivePatchType +from otterdog.models.secret import Secret +from otterdog.utils import expect_type, unwrap + +if TYPE_CHECKING: + from otterdog.jsonnet import JsonnetConfig + from otterdog.providers.github import GitHubProvider + + +@dataclasses.dataclass +class EnvironmentSecret(Secret): + """ + Represents a Secret defined on environment level. 
+ """ + + @property + def model_object_name(self) -> str: + return "env_secret" + + def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: bool) -> str | None: + return f"orgs.{jsonnet_config.create_env_secret}" + + @classmethod + async def apply_live_patch( + cls, + patch: LivePatch[EnvironmentSecret], + org_id: str, + provider: GitHubProvider, + ) -> None: + from .environment import Environment + from .repository import Repository + + environment = expect_type(patch.parent_object, Environment) + repository = expect_type(patch.grandparent_object, Repository) + + match patch.patch_type: + case LivePatchType.ADD: + await provider.add_env_secret( + org_id, + repository.name, + environment.name, + await unwrap(patch.expected_object).to_provider_data(org_id, provider), + ) + + case LivePatchType.REMOVE: + await provider.delete_env_secret( + org_id, + repository.name, + environment.name, + unwrap(patch.current_object).name, + ) + + case LivePatchType.CHANGE: + await provider.update_env_secret( + org_id, + repository.name, + environment.name, + unwrap(patch.current_object).name, + await unwrap(patch.expected_object).to_provider_data(org_id, provider), + ) diff --git a/otterdog/models/env_variable.py b/otterdog/models/env_variable.py new file mode 100644 index 00000000..f6313105 --- /dev/null +++ b/otterdog/models/env_variable.py @@ -0,0 +1,72 @@ +# ******************************************************************************* +# Copyright (c) 2023-2024 Eclipse Foundation and others. 
+# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from __future__ import annotations + +import dataclasses +from typing import TYPE_CHECKING + +from otterdog.models import LivePatch, LivePatchType +from otterdog.models.variable import Variable +from otterdog.utils import expect_type, unwrap + +if TYPE_CHECKING: + from otterdog.jsonnet import JsonnetConfig + from otterdog.providers.github import GitHubProvider + + +@dataclasses.dataclass +class EnvironmentVariable(Variable): + """ + Represents a Variable defined on environment level. + """ + + @property + def model_object_name(self) -> str: + return "env_variable" + + def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: bool) -> str | None: + return f"orgs.{jsonnet_config.create_env_variable}" + + @classmethod + async def apply_live_patch( + cls, + patch: LivePatch[EnvironmentVariable], + org_id: str, + provider: GitHubProvider, + ) -> None: + from .environment import Environment + from .repository import Repository + + environment = expect_type(patch.parent_object, Environment) + repository = expect_type(patch.grandparent_object, Repository) + match patch.patch_type: + case LivePatchType.ADD: + await provider.add_env_variable( + org_id, + repository.name, + environment.name, + await unwrap(patch.expected_object).to_provider_data(org_id, provider), + ) + + case LivePatchType.REMOVE: + await provider.delete_env_variable( + org_id, + repository.name, + environment.name, + unwrap(patch.current_object).name, + ) + + case LivePatchType.CHANGE: + await provider.update_env_variable( + org_id, + repository.name, + environment.name, + unwrap(patch.current_object).name, + await unwrap(patch.expected_object).to_provider_data(org_id, provider), + ) 
diff --git a/otterdog/models/environment.py b/otterdog/models/environment.py index b79005c3..b59720ab 100644 --- a/otterdog/models/environment.py +++ b/otterdog/models/environment.py @@ -9,23 +9,39 @@ from __future__ import annotations import dataclasses -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, TypeVar from jsonbender import F, Filter, Forall, If, K, OptionalS, S # type: ignore from otterdog.models import ( FailureType, LivePatch, + LivePatchContext, + LivePatchHandler, LivePatchType, ModelObject, + PatchContext, ValidationContext, ) -from otterdog.utils import expect_type, is_set_and_valid, is_unset, unwrap +from otterdog.utils import ( + Change, + IndentingPrinter, + expect_type, + is_set_and_valid, + is_unset, + unwrap, + write_patch_object_as_json, +) + +from .env_secret import EnvironmentSecret +from .env_variable import EnvironmentVariable if TYPE_CHECKING: from otterdog.jsonnet import JsonnetConfig from otterdog.providers.github import GitHubProvider +ET = TypeVar("ET", bound="Environment") + @dataclasses.dataclass class Environment(ModelObject): @@ -40,12 +56,32 @@ class Environment(ModelObject): reviewers: list[str] deployment_branch_policy: str branch_policies: list[str] + secrets: list[EnvironmentSecret] = dataclasses.field(metadata={"nested_model": True}, default_factory=list) + variables: list[EnvironmentVariable] = dataclasses.field(metadata={"nested_model": True}, default_factory=list) + + def add_secret(self, secret: EnvironmentSecret) -> None: + self.secrets.append(secret) + + def get_secret(self, name: str) -> EnvironmentSecret | None: + return next(filter(lambda x: x.name == name, self.secrets), None) # type: ignore + + def set_secrets(self, secrets: list[EnvironmentSecret]) -> None: + self.secrets = secrets + + def add_variable(self, variable: EnvironmentVariable) -> None: + self.variables.append(variable) + + def get_variable(self, name: str) -> EnvironmentVariable | None: + return next(filter(lambda x: 
x.name == name, self.variables), None) # type: ignore + + def set_variables(self, variables: list[EnvironmentVariable]) -> None: + self.variables = variables @property def model_object_name(self) -> str: return "environment" - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: if not is_unset(self.wait_timer) and not (0 <= self.wait_timer <= 43200): context.add_failure( FailureType.ERROR, @@ -69,6 +105,11 @@ def validate(self, context: ValidationContext, parent_object: Any) -> None: f"'{self.deployment_branch_policy}', " f"but 'branch_policies' is set to '{self.branch_policies}', setting will be ignored.", ) + for secret in self.secrets: + secret.validate(context, self, parent_object) + + for variable in self.variables: + variable.validate(context, self, parent_object) def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool: if self.deployment_branch_policy != "selected": @@ -80,7 +121,9 @@ def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool: def include_field_for_patch_computation(self, field: dataclasses.Field) -> bool: return True - def include_existing_object_for_live_patch(self, org_id: str, parent_object: ModelObject | None) -> bool: + def include_existing_object_for_live_patch( + self, org_id: str, parent_object: ModelObject | None, grandparent_object: ModelObject | None + ) -> bool: from .repository import Repository parent_object = expect_type(parent_object, Repository) @@ -94,6 +137,19 @@ def include_existing_object_for_live_patch(self, org_id: str, parent_object: Mod else: return True + @classmethod + def get_mapping_from_model(cls) -> dict[str, Any]: + mapping = super().get_mapping_from_model() + + mapping.update( + { + "secrets": OptionalS("secrets", default=[]) >> Forall(lambda x: EnvironmentSecret.from_model_data(x)), + "variables": OptionalS("variables", default=[]) + >> 
Forall(lambda x: EnvironmentVariable.from_model_data(x)), + } + ) + return mapping + @classmethod def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[str, Any]: mapping = super().get_mapping_from_provider(org_id, data) @@ -101,10 +157,10 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st def transform_reviewers(x): match x["type"]: case "User": - return f'@{x["reviewer"]["login"]}' + return f"@{x['reviewer']['login']}" case "Team": - return f'@{org_id}/{x["reviewer"]["slug"]}' + return f"@{org_id}/{x['reviewer']['slug']}" case _: raise RuntimeError("unexpected review type '{x[\"type\"]}'") @@ -143,6 +199,8 @@ def transform_branch_policy(x): >> Forall(lambda x: transform_reviewers(x)), "deployment_branch_policy": OptionalS("deployment_branch_policy") >> F(transform_policy), "branch_policies": OptionalS("branch_policies", default=[]) >> Forall(transform_branch_policy), + "secrets": K([]), + "variables": K([]), } ) return mapping @@ -192,6 +250,65 @@ async def get_mapping_to_provider( def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: bool) -> str | None: return f"orgs.{jsonnet_config.create_environment}" + @classmethod + def generate_live_patch( + cls, + expected_object: Environment | None, + current_object: Environment | None, + parent_object: ModelObject | None, + grandparent_object: ModelObject | None, + context: LivePatchContext, + handler: LivePatchHandler, + ) -> None: + if expected_object is None: + current_object = unwrap(current_object) + handler( + LivePatch.of_deletion( + current_object, parent_object, grandparent_object, current_object.apply_live_patch + ) + ) + return + + if current_object is None: + handler( + LivePatch.of_addition( + expected_object, parent_object, grandparent_object, expected_object.apply_live_patch + ) + ) + else: + modified_rule: dict[str, Change[Any]] = expected_object.get_difference_from(current_object) + + if len(modified_rule) > 0: + handler( 
+ LivePatch.of_changes( + expected_object, + current_object, + modified_rule, + parent_object, + grandparent_object, + False, + expected_object.apply_live_patch, + ) + ) + + EnvironmentSecret.generate_live_patch_of_list( + expected_object.secrets, + current_object.secrets if current_object is not None else [], + expected_object, + parent_object, + context, + handler, + ) + + EnvironmentVariable.generate_live_patch_of_list( + expected_object.variables, + current_object.variables if current_object is not None else [], + expected_object, + parent_object, + context, + handler, + ) + @classmethod async def apply_live_patch(cls, patch: LivePatch[Environment], org_id: str, provider: GitHubProvider) -> None: from .repository import Repository @@ -221,3 +338,54 @@ async def apply_live_patch(cls, patch: LivePatch[Environment], org_id: str, prov current_object.name, await cls.changes_to_provider(org_id, unwrap(patch.changes), provider), ) + + def to_jsonnet( + self, + printer: IndentingPrinter, + jsonnet_config: JsonnetConfig, + context: PatchContext, + extend: bool, + default_object: ModelObject, + ) -> None: + patch = self.get_patch_to(default_object) + + has_secrets = len(self.secrets) > 0 + has_variables = len(self.variables) > 0 + + if "name" in patch: + patch.pop("name") + + function = self.get_jsonnet_template_function(jsonnet_config, extend) + printer.print(f"{function}('{self.name}')") + + write_patch_object_as_json(patch, printer, close_object=False) + + # FIXME: support overriding secrets for repos coming from the default configuration. + if has_secrets: + default_env_secret = EnvironmentSecret.from_model_data(jsonnet_config.default_env_secret_config) + + printer.println("secrets: [") + printer.level_up() + + for secret in self.secrets: + secret.to_jsonnet(printer, jsonnet_config, context, False, default_env_secret) + + printer.level_down() + printer.println("],") + + # FIXME: support overriding variables for repos coming from the default configuration. 
+ if has_variables: + default_env_variable = EnvironmentVariable.from_model_data(jsonnet_config.default_env_variable_config) + + printer.println("variables: [") + printer.level_up() + + for variable in self.variables: + variable.to_jsonnet(printer, jsonnet_config, context, False, default_env_variable) + + printer.level_down() + printer.println("],") + + # close the repo object + printer.level_down() + printer.println("},") diff --git a/otterdog/models/github_organization.py b/otterdog/models/github_organization.py index 0e22a2e4..d71984ae 100644 --- a/otterdog/models/github_organization.py +++ b/otterdog/models/github_organization.py @@ -31,6 +31,8 @@ ) from otterdog.models.branch_protection_rule import BranchProtectionRule from otterdog.models.custom_property import CustomProperty +from otterdog.models.env_secret import EnvironmentSecret +from otterdog.models.env_variable import EnvironmentVariable from otterdog.models.environment import Environment from otterdog.models.organization_role import OrganizationRole from otterdog.models.organization_ruleset import OrganizationRuleset @@ -46,6 +48,7 @@ from otterdog.models.repo_workflow_settings import RepositoryWorkflowSettings from otterdog.models.repository import Repository from otterdog.models.team import Team +from otterdog.models.team_permission import TeamPermission from otterdog.utils import IndentingPrinter, associate_by_key, debug_times, jsonnet_evaluate_file if TYPE_CHECKING: @@ -172,7 +175,7 @@ async def validate( config.exclude_teams_pattern, provider=provider, ) - self.settings.validate(context, self) + self.settings.validate(context, self, None) enterprise_plan = self.settings.plan == "enterprise" @@ -184,16 +187,16 @@ async def validate( ) else: for role in self.roles: - role.validate(context, self) + role.validate(context, self, None) for team in self.teams: - team.validate(context, self) + team.validate(context, self, None) for webhook in self.webhooks: - webhook.validate(context, self) + 
webhook.validate(context, self, None) for secret in self.secrets: - secret.validate(context, self) + secret.validate(context, self, None) if len(self.rulesets) > 0 and not enterprise_plan: context.add_failure( @@ -203,12 +206,12 @@ async def validate( ) else: for ruleset in self.rulesets: - ruleset.validate(context, self) + ruleset.validate(context, self, None) # Run synchronous validations and collect repos needing API codescaning validation repos_needing_codescaning_language_validation = [] for repo in self.repositories: - repo.validate(context, self) + repo.validate(context, self, None) if repo.requires_language_validation(): repos_needing_codescaning_language_validation.append(repo) @@ -474,23 +477,27 @@ def to_jsonnet(self, config: JsonnetConfig, context: PatchContext) -> str: def generate_live_patch( self, current_organization: GitHubOrganization, context: LivePatchContext, handler: LivePatchHandler ) -> None: - OrganizationRole.generate_live_patch_of_list(self.roles, current_organization.roles, None, context, handler) - Team.generate_live_patch_of_list(self.teams, current_organization.teams, None, context, handler) - OrganizationSettings.generate_live_patch(self.settings, current_organization.settings, None, context, handler) + OrganizationRole.generate_live_patch_of_list( + self.roles, current_organization.roles, None, None, context, handler + ) + Team.generate_live_patch_of_list(self.teams, current_organization.teams, None, None, context, handler) + OrganizationSettings.generate_live_patch( + self.settings, current_organization.settings, None, None, context, handler + ) OrganizationWebhook.generate_live_patch_of_list( - self.webhooks, current_organization.webhooks, None, context, handler + self.webhooks, current_organization.webhooks, None, None, context, handler ) OrganizationSecret.generate_live_patch_of_list( - self.secrets, current_organization.secrets, None, context, handler + self.secrets, current_organization.secrets, None, None, context, handler ) 
OrganizationVariable.generate_live_patch_of_list( - self.variables, current_organization.variables, None, context, handler + self.variables, current_organization.variables, None, None, context, handler ) OrganizationRuleset.generate_live_patch_of_list( - self.rulesets, current_organization.rulesets, None, context, handler + self.rulesets, current_organization.rulesets, None, None, context, handler ) Repository.generate_live_patch_of_list( - self.repositories, current_organization.repositories, None, context, handler + self.repositories, current_organization.repositories, None, None, context, handler ) @classmethod @@ -583,6 +590,19 @@ async def _load_teams() -> None: continue team_members = await provider.get_org_team_members(github_id, team_slug) team["members"] = team_members + # Do the team-sync + sync_groups = await provider.get_org_team_sync_groups(github_id, team_slug) + if sync_groups: + team["team_sync_id"] = sync_groups[0].get("group_id", None) + team["team_sync_name"] = sync_groups[0].get("group_name", None) + team["team_sync_description"] = sync_groups[0].get("group_description", None) + else: + team["team_sync_id"] = None + team["team_sync_name"] = None + team["team_sync_description"] = None + # External Groups + external_groups = await provider.get_org_team_external_groups(github_id, team_slug) + team["external_groups"] = external_groups org.add_team(Team.from_provider_data(github_id, team)) else: _logger.debug("not reading teams, no default config available") @@ -672,6 +692,7 @@ async def _process_single_repo( repo_name: str, jsonnet_config: JsonnetConfig, teams: dict[str, Any], + repo_permissions: dict[str, list[dict[str, Any]]], app_installations: dict[str, str], ) -> tuple[str, Repository]: rest_api = gh_client.rest_api @@ -750,15 +771,60 @@ async def _process_single_repo( # get environments of the repo environments = await rest_api.repo.get_environments(github_id, repo_name) for github_environment in environments: - 
repo.add_environment(Environment.from_provider_data(github_id, github_environment)) + environment = Environment.from_provider_data(github_id, github_environment) + repo.add_environment(environment) + if jsonnet_config.default_env_variable_config is not None: + # get variables of the repo environment + variables = await rest_api.env.get_variables(github_id, repo.name, environment.name) + for github_variable in variables: + environment.add_variable(EnvironmentVariable.from_provider_data(github_id, github_variable)) + else: + _logger.debug("not reading repo env variables, no default config available") + if jsonnet_config.default_env_secret_config is not None: + # get secrets of the repo environment + secrets = await rest_api.env.get_secrets(github_id, repo.name, environment.name) + for github_secret in secrets: + environment.add_secret(EnvironmentSecret.from_provider_data(github_id, github_secret)) + else: + _logger.debug("not reading repo env secrets, no default config available") else: _logger.debug("not reading environments, no default config available") + if jsonnet_config.default_team_permission_config is not None: + team_permissions = repo_permissions.get(repo_name, []) + for github_team_permission in team_permissions: + repo.add_team_permission(TeamPermission.from_provider_data(github_id, github_team_permission)) + else: + _logger.debug("not reading team permissions, no default config available") _logger.debug("done retrieving data for repo '%s'", repo_name) return repo_name, repo +def build_repo_permissions(teams: list[dict[str, Any]]) -> dict[str, list[dict[str, Any]]]: + """ + Convert the output from the graphql call, which is team-centric, to a repository + centric structure. 
+ """ + + repo_permissions: dict[str, list[dict[str, Any]]] = {} + for team in teams: + team_slug = team["slug"] + + # List of repo edges of this team + edges = team.get("repositories", {}).get("edges", []) + + for edge in edges: + repo_name = edge["node"]["name"] + permission = edge["permission"] + + if repo_name not in repo_permissions: + repo_permissions[repo_name] = [] + + repo_permissions[repo_name].append({"name": team_slug, "permission": permission}) + return repo_permissions + + async def _load_repos_from_provider( github_id: str, org_settings: OrganizationSettings, @@ -776,6 +842,7 @@ async def _load_repos_from_provider( repo_names = fnmatch.filter(repo_names, repo_filter) teams = {str(team["id"]): f"{github_id}/{team['slug']}" for team in await provider.get_org_teams(github_id)} + repo_permissions = build_repo_permissions(await provider.get_team_permissions(github_id)) # limit the number of repos that are processed concurrently to avoid hitting secondary rate limits sem = asyncio.Semaphore(50 if concurrency is None else concurrency) @@ -789,6 +856,7 @@ async def safe_process(repo_name): repo_name, jsonnet_config, teams, + repo_permissions, app_installations, ) diff --git a/otterdog/models/organization_ruleset.py b/otterdog/models/organization_ruleset.py index 05d3a6f0..c297e090 100644 --- a/otterdog/models/organization_ruleset.py +++ b/otterdog/models/organization_ruleset.py @@ -40,12 +40,13 @@ def model_object_name(self) -> str: def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: bool) -> str | None: return f"orgs.{jsonnet_config.create_org_ruleset}" - def validate(self, context: ValidationContext, parent_object: Any) -> None: - from otterdog.models.github_organization import GitHubOrganization + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: + if TYPE_CHECKING: + from otterdog.models.github_organization import GitHubOrganization - super().validate(context, parent_object) + 
super().validate(context, parent_object, grandparent_object) - repositories = cast(GitHubOrganization, context.root_object).repositories + repositories = cast("GitHubOrganization", context.root_object).repositories all_repo_names = (x.name for x in repositories) if is_set_and_valid(self.include_repo_names): diff --git a/otterdog/models/organization_secret.py b/otterdog/models/organization_secret.py index 394c120f..1cbc7251 100644 --- a/otterdog/models/organization_secret.py +++ b/otterdog/models/organization_secret.py @@ -35,13 +35,14 @@ class OrganizationSecret(Secret): def model_object_name(self) -> str: return "org_secret" - def validate(self, context: ValidationContext, parent_object: Any) -> None: - super().validate(context, parent_object) + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: + super().validate(context, parent_object, grandparent_object) if is_set_and_valid(self.visibility): - from .github_organization import GitHubOrganization + if TYPE_CHECKING: + from .github_organization import GitHubOrganization - org = cast(GitHubOrganization, parent_object) + org = cast("GitHubOrganization", parent_object) if self.visibility == "private" and org.settings.plan == "free": context.add_failure( FailureType.ERROR, diff --git a/otterdog/models/organization_settings.py b/otterdog/models/organization_settings.py index 8d8a226d..b6280810 100644 --- a/otterdog/models/organization_settings.py +++ b/otterdog/models/organization_settings.py @@ -100,7 +100,7 @@ def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool: def include_field_for_patch_computation(self, field: dataclasses.Field) -> bool: return True - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: # execute custom validation rules if present self.execute_custom_validation_if_present(context, 
"validate-org-settings.py") @@ -143,10 +143,10 @@ def validate(self, context: ValidationContext, parent_object: Any) -> None: if is_set_and_present(self.custom_properties): for custom_property in self.custom_properties: - custom_property.validate(context, self) + custom_property.validate(context, self, None) if is_set_and_present(self.workflows): - self.workflows.validate(context, self) + self.workflows.validate(context, self, None) def get_model_objects(self) -> Iterator[tuple[ModelObject, ModelObject]]: if is_set_and_present(self.custom_properties): @@ -207,7 +207,7 @@ def to_jsonnet( extend: bool, default_object: ModelObject, ) -> None: - default_org_settings = cast(OrganizationSettings, default_object) + default_org_settings = cast("OrganizationSettings", default_object) patch = self.get_patch_to(default_object) @@ -238,7 +238,7 @@ def to_jsonnet( printer.println("],") if is_set_and_present(self.workflows): - default_workflow_settings = cast(OrganizationSettings, default_object).workflows + default_workflow_settings = cast("OrganizationSettings", default_object).workflows patch = self.workflows.get_patch_to(default_workflow_settings) if len(patch) > 0: @@ -254,6 +254,7 @@ def generate_live_patch( expected_object: OrganizationSettings | None, current_object: OrganizationSettings | None, parent_object: ModelObject | None, + grandparent_object: ModelObject | None, context: LivePatchContext, handler: LivePatchHandler, ) -> None: @@ -265,7 +266,7 @@ def generate_live_patch( # this setting is only intended to disable any existing default configuration, it can not be enabled per se if "default_code_security_configurations_disabled" in modified_settings: change: Change[bool] = cast( - Change[bool], + "Change[bool]", modified_settings.get("default_code_security_configurations_disabled"), ) if change.from_value is True and change.to_value is False: @@ -278,6 +279,7 @@ def generate_live_patch( current_object, modified_settings, parent_object, + grandparent_object, False, 
cls.apply_live_patch, ) @@ -290,6 +292,7 @@ def generate_live_patch( expected_object.custom_properties, current_object.custom_properties, expected_object, + None, context, handler, ) diff --git a/otterdog/models/organization_variable.py b/otterdog/models/organization_variable.py index 9f5ac1b6..56924d11 100644 --- a/otterdog/models/organization_variable.py +++ b/otterdog/models/organization_variable.py @@ -35,13 +35,14 @@ class OrganizationVariable(Variable): def model_object_name(self) -> str: return "org_variable" - def validate(self, context: ValidationContext, parent_object: Any) -> None: - super().validate(context, parent_object) + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: + super().validate(context, parent_object, grandparent_object) if is_set_and_valid(self.visibility): - from .github_organization import GitHubOrganization + if TYPE_CHECKING: + from .github_organization import GitHubOrganization - org = cast(GitHubOrganization, parent_object) + org = cast("GitHubOrganization", parent_object) if self.visibility == "private" and org.settings.plan == "free": context.add_failure( FailureType.ERROR, diff --git a/otterdog/models/organization_workflow_settings.py b/otterdog/models/organization_workflow_settings.py index ec18461c..b9e3748e 100644 --- a/otterdog/models/organization_workflow_settings.py +++ b/otterdog/models/organization_workflow_settings.py @@ -48,8 +48,8 @@ def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool: return super().include_field_for_diff_computation(field) - def validate(self, context: ValidationContext, parent_object: Any) -> None: - super().validate(context, parent_object) + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: + super().validate(context, parent_object, grandparent_object) if is_set_and_valid(self.enabled_repositories): if self.enabled_repositories not in {"all", "none", "selected"}: diff 
--git a/otterdog/models/repo_workflow_settings.py b/otterdog/models/repo_workflow_settings.py index 594a3b49..8a724bdd 100644 --- a/otterdog/models/repo_workflow_settings.py +++ b/otterdog/models/repo_workflow_settings.py @@ -42,9 +42,10 @@ def coerce_from_org_settings( if org_workflow_settings.enabled_repositories == "none": copy.enabled = UNSET # type: ignore - from otterdog.models.repository import Repository + if TYPE_CHECKING: + from otterdog.models.repository import Repository - repository_name = cast(Repository, parent_object).name + repository_name = cast("Repository", parent_object).name if ( org_workflow_settings.enabled_repositories == "selected" @@ -65,19 +66,21 @@ def coerce_from_org_settings( return copy - def validate(self, context: ValidationContext, parent_object: Any) -> None: - from .repository import Repository + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: + if TYPE_CHECKING: + from .repository import Repository - super().validate(context, parent_object) + super().validate(context, parent_object, grandparent_object) - repo = cast(Repository, parent_object) + repo = cast("Repository", parent_object) actions_enabled = None if is_set_and_valid(self.enabled) and self.enabled is True: - from .github_organization import GitHubOrganization + if TYPE_CHECKING: + from .github_organization import GitHubOrganization actions_enabled = True - org_workflow_settings = cast(GitHubOrganization, context.root_object).settings.workflows + org_workflow_settings = cast("GitHubOrganization", context.root_object).settings.workflows if org_workflow_settings.enabled_repositories == "none": actions_enabled = False diff --git a/otterdog/models/repository.py b/otterdog/models/repository.py index 358b68c3..f77f059c 100644 --- a/otterdog/models/repository.py +++ b/otterdog/models/repository.py @@ -37,12 +37,12 @@ from .branch_protection_rule import BranchProtectionRule from .environment import Environment -from 
.organization_settings import OrganizationSettings from .repo_ruleset import RepositoryRuleset from .repo_secret import RepositorySecret from .repo_variable import RepositoryVariable from .repo_webhook import RepositoryWebhook from .repo_workflow_settings import RepositoryWorkflowSettings +from .team_permission import TeamPermission if TYPE_CHECKING: from collections.abc import Callable, Iterator @@ -50,6 +50,9 @@ from otterdog.jsonnet import JsonnetConfig from otterdog.providers.github import GitHubProvider + from .github_organization import GitHubOrganization + from .organization_settings import OrganizationSettings + @dataclasses.dataclass class Repository(ModelObject): @@ -120,6 +123,7 @@ class Repository(ModelObject): ) rulesets: list[RepositoryRuleset] = dataclasses.field(metadata={"nested_model": True}, default_factory=list) environments: list[Environment] = dataclasses.field(metadata={"nested_model": True}, default_factory=list) + team_permissions: list[TeamPermission] = dataclasses.field(metadata={"nested_model": True}, default_factory=list) _security_properties: ClassVar[list[str]] = [ "secret_scanning", @@ -243,6 +247,12 @@ def add_environment(self, environment: Environment) -> None: def set_environments(self, environments: list[Environment]) -> None: self.environments = environments + def add_team_permission(self, team_permission: TeamPermission) -> None: + self.team_permissions.append(team_permission) + + def set_team_permisions(self, team_permissions: list[TeamPermission]) -> None: + self.team_permissions = team_permissions + def coerce_from_org_settings(self, org_settings: OrganizationSettings, for_patch: bool = False) -> Repository: copy = dataclasses.replace(self) @@ -285,12 +295,11 @@ def requires_language_validation(self) -> bool: ) async def validate_code_scanning_languages(self, context: ValidationContext, parent_object: Any) -> None: - from .github_organization import GitHubOrganization # Only validate if provider is available and validation 
is required if self.requires_language_validation() and context.provider is not None: provider = context.provider - github_id = cast(GitHubOrganization, parent_object).github_id + github_id = cast("GitHubOrganization", parent_object).github_id try: languages = await provider.rest_api.repo.get_languages(github_id, self.name) @@ -341,11 +350,10 @@ async def validate_code_scanning_languages(self, context: ValidationContext, par f"{self.get_model_header(parent_object)} could not validate 'code_scanning_default_languages': {e}", ) - def validate(self, context: ValidationContext, parent_object: Any) -> None: - from .github_organization import GitHubOrganization + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: - github_id = cast(GitHubOrganization, parent_object).github_id - org_settings = cast(GitHubOrganization, parent_object).settings + github_id = cast("GitHubOrganization", parent_object).github_id + org_settings = cast("GitHubOrganization", parent_object).settings free_plan = org_settings.plan == "free" @@ -455,7 +463,7 @@ def validate(self, context: ValidationContext, parent_object: Any) -> None: ) for webhook in self.webhooks: - webhook.validate(context, self) + webhook.validate(context, self, None) if self.archived is True: if len(self.branch_protection_rules) > 0: @@ -659,22 +667,25 @@ def validate(self, context: ValidationContext, parent_object: Any) -> None: ) if is_set_and_present(self.workflows): - self.workflows.validate(context, self) + self.workflows.validate(context, self, None) for secret in self.secrets: - secret.validate(context, self) + secret.validate(context, self, None) for variable in self.variables: - variable.validate(context, self) + variable.validate(context, self, None) for bpr in self.branch_protection_rules: - bpr.validate(context, self) + bpr.validate(context, self, None) for rule in self.rulesets: - rule.validate(context, self) + rule.validate(context, self, None) for env in 
self.environments: - env.validate(context, self) + env.validate(context, self, None) + + for tp in self.team_permissions: + tp.validate(context, self, None) @staticmethod def _valid_topic(topic, search=re.compile(r"[^a-z0-9\-]").search): @@ -776,6 +787,10 @@ def get_model_objects(self) -> Iterator[tuple[ModelObject, ModelObject]]: yield env, self yield from env.get_model_objects() + for tp in self.team_permissions: + yield tp, self + yield from tp.get_model_objects() + @classmethod def get_mapping_from_model(cls) -> dict[str, Any]: mapping = super().get_mapping_from_model() @@ -796,6 +811,8 @@ def get_mapping_from_model(cls) -> dict[str, Any]: K(UNSET), S("workflows") >> F(lambda x: RepositoryWorkflowSettings.from_model_data(x)), ), + "team_permissions": OptionalS("team_permissions", default=[]) + >> Forall(lambda x: TeamPermission.from_model_data(x)), } ) @@ -863,6 +880,7 @@ def property_list_to_map(properties): "branch_protection_rules": K([]), "rulesets": K([]), "environments": K([]), + "team_permissions": K([]), "secret_scanning": OptionalS("security_and_analysis", "secret_scanning", "status", default=UNSET), "secret_scanning_push_protection": OptionalS( "security_and_analysis", @@ -992,13 +1010,13 @@ def resolve_secrets(self, secret_resolver: Callable[[str], str]) -> None: def copy_secrets(self, other_object: ModelObject) -> None: for webhook in self.webhooks: - other_repo = cast(Repository, other_object) + other_repo = cast("Repository", other_object) other_webhook = other_repo.get_webhook(webhook.url) if other_webhook is not None: webhook.copy_secrets(other_webhook) for secret in self.secrets: - other_repo = cast(Repository, other_object) + other_repo = cast("Repository", other_object) other_secret = other_repo.get_secret(secret.name) if other_secret is not None: secret.copy_secrets(other_secret) @@ -1014,7 +1032,7 @@ def to_jsonnet( extend: bool, default_object: ModelObject, ) -> None: - coerced_repo = 
self.coerce_from_org_settings(cast(OrganizationSettings, context.org_settings), for_patch=True) + coerced_repo = self.coerce_from_org_settings(cast("OrganizationSettings", context.org_settings), for_patch=True) patch = coerced_repo.get_patch_to(default_object) has_webhooks = len(self.webhooks) > 0 @@ -1023,6 +1041,7 @@ def to_jsonnet( has_branch_protection_rules = len(self.branch_protection_rules) > 0 has_rulesets = len(self.rulesets) > 0 has_environments = len(self.environments) > 0 + has_team_permissions = len(self.team_permissions) > 0 if "name" in patch: patch.pop("name") @@ -1042,11 +1061,11 @@ def to_jsonnet( write_patch_object_as_json(patch, printer, close_object=False) if is_set_and_present(self.workflows): - default_workflow_settings = cast(Repository, default_object).workflows + default_workflow_settings = cast("Repository", default_object).workflows if is_set_and_present(default_workflow_settings): coerced_settings = self.workflows.coerce_from_org_settings( - self, cast(OrganizationSettings, context.org_settings).workflows + self, cast("OrganizationSettings", context.org_settings).workflows ) patch = coerced_settings.get_patch_to(default_workflow_settings) if len(patch) > 0: @@ -1139,6 +1158,20 @@ def to_jsonnet( printer.level_down() printer.println("],") + # FIXME: support overriding team permissions for repos coming from
+ if has_team_permissions and not extend: + default_teampermission = TeamPermission.from_model_data(jsonnet_config.default_team_permission_config) + + printer.println("team_permissions: [") + printer.level_up() + + for tp in self.team_permissions: + tp.to_jsonnet(printer, jsonnet_config, context, False, default_teampermission) + + printer.level_down() + printer.println("],") + # close the repo object printer.level_down() printer.println("},") @@ -1149,17 +1182,22 @@ def generate_live_patch( expected_object: Repository | None, current_object: Repository | None, parent_object: ModelObject | None, + grandparent_object: ModelObject | None, context: LivePatchContext, handler: LivePatchHandler, ) -> None: if expected_object is None: current_object = unwrap(current_object) - handler(LivePatch.of_deletion(current_object, parent_object, current_object.apply_live_patch)) + handler( + LivePatch.of_deletion( + current_object, parent_object, grandparent_object, current_object.apply_live_patch + ) + ) return expected_object = unwrap(expected_object) - expected_org_settings = cast(OrganizationSettings, context.expected_org_settings) + expected_org_settings = cast("OrganizationSettings", context.expected_org_settings) coerced_object = expected_object.coerce_from_org_settings(expected_org_settings) # also coerce the workflow settings if present if is_set_and_present(coerced_object.workflows) and is_set_and_present(expected_org_settings.workflows): @@ -1170,19 +1208,23 @@ def generate_live_patch( changes_object_to_readonly = False if current_object is None: - handler(LivePatch.of_addition(coerced_object, parent_object, coerced_object.apply_live_patch)) + handler( + LivePatch.of_addition( + coerced_object, parent_object, grandparent_object, coerced_object.apply_live_patch + ) + ) else: if context.current_org_settings is not None: - current_org_settings = cast(OrganizationSettings, context.current_org_settings) + current_org_settings = cast("OrganizationSettings", 
context.current_org_settings) current_object = current_object.coerce_from_org_settings(current_org_settings) modified_repo: dict[str, Change[Any]] = coerced_object.get_difference_from(current_object) - is_archived = cast(Repository, coerced_object).archived + is_archived = cast("Repository", coerced_object).archived if is_archived is False and "web_commit_signoff_required" in context.modified_org_settings: change = context.modified_org_settings["web_commit_signoff_required"] if change.to_value is False: - web_commit_signoff_required = cast(Repository, coerced_object).web_commit_signoff_required + web_commit_signoff_required = cast("Repository", coerced_object).web_commit_signoff_required modified_repo["web_commit_signoff_required"] = Change( web_commit_signoff_required, web_commit_signoff_required ) @@ -1212,6 +1254,7 @@ def generate_live_patch( current_object, modified_repo, parent_object, + grandparent_object, False, coerced_object.apply_live_patch, changes_object_to_readonly, @@ -1222,6 +1265,7 @@ def generate_live_patch( coerced_object.webhooks, current_object.webhooks if current_object is not None else [], coerced_object, + None, context, handler, ) @@ -1230,6 +1274,7 @@ def generate_live_patch( coerced_object.secrets, current_object.secrets if current_object is not None else [], coerced_object, + None, context, handler, ) @@ -1238,6 +1283,7 @@ def generate_live_patch( coerced_object.variables, current_object.variables if current_object is not None else [], coerced_object, + None, context, handler, ) @@ -1246,6 +1292,7 @@ def generate_live_patch( coerced_object.environments, current_object.environments if current_object is not None else [], coerced_object, + None, context, handler, ) @@ -1258,6 +1305,7 @@ def generate_live_patch( coerced_object.branch_protection_rules, (current_object.branch_protection_rules if current_object is not None else []), coerced_object, + None, context, handler, ) @@ -1266,10 +1314,20 @@ def generate_live_patch( 
coerced_object.rulesets, current_object.rulesets if current_object is not None else [], coerced_object, + None, context, handler, ) + TeamPermission.generate_live_patch_of_list( + coerced_object.team_permissions, + current_object.team_permissions if current_object is not None else [], + coerced_object, + None, + context, + handler, + ) + @staticmethod def _include_squash_merge_patch_required_properties( patch: LivePatch[Repository], @@ -1283,13 +1341,13 @@ def _include_squash_merge_patch_required_properties( squash_merge_commit_message_present = "squash_merge_commit_message" in patch.changes if squash_merge_commit_title_present and not squash_merge_commit_message_present: - squash_merge_commit_message = cast(Repository, patch.current_object).squash_merge_commit_message + squash_merge_commit_message = cast("Repository", patch.current_object).squash_merge_commit_message patch.changes["squash_merge_commit_message"] = Change( squash_merge_commit_message, squash_merge_commit_message ) if squash_merge_commit_message_present and not squash_merge_commit_title_present: - squash_merge_commit_title = cast(Repository, patch.current_object).squash_merge_commit_title + squash_merge_commit_title = cast("Repository", patch.current_object).squash_merge_commit_title patch.changes["squash_merge_commit_title"] = Change( squash_merge_commit_title, squash_merge_commit_title ) @@ -1309,11 +1367,11 @@ def _include_gh_pages_patch_required_properties( gh_pages_source_path_present = "gh_pages_source_path" in patch.changes if gh_pages_source_path_present and not gh_pages_source_branch_present: - gh_pages_source_branch = cast(Repository, patch.current_object).gh_pages_source_branch + gh_pages_source_branch = cast("Repository", patch.current_object).gh_pages_source_branch patch.changes["gh_pages_source_branch"] = Change(gh_pages_source_branch, gh_pages_source_branch) if gh_pages_source_branch_present and not gh_pages_source_path_present: - gh_pages_source_path = cast(Repository, 
patch.current_object).gh_pages_source_path + gh_pages_source_path = cast("Repository", patch.current_object).gh_pages_source_path patch.changes["gh_pages_source_path"] = Change(gh_pages_source_path, gh_pages_source_path) return patch diff --git a/otterdog/models/role.py b/otterdog/models/role.py index 7a982091..2e23f43c 100644 --- a/otterdog/models/role.py +++ b/otterdog/models/role.py @@ -37,7 +37,7 @@ class Role(ModelObject, abc.ABC): permissions: list[str] base_role: str - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: if is_set_and_valid(self.base_role): if self.base_role not in {"none", "read", "triage", "write", "maintain", "admin"}: context.add_failure( diff --git a/otterdog/models/ruleset.py b/otterdog/models/ruleset.py index aa01f3bd..f37fb41f 100644 --- a/otterdog/models/ruleset.py +++ b/otterdog/models/ruleset.py @@ -51,7 +51,7 @@ class PullRequestSettings(EmbeddedModelObject): requires_last_push_approval: bool requires_review_thread_resolution: bool - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: for key in self.keys(False): value = self.__getattribute__(key) if is_unset(value): @@ -122,7 +122,7 @@ class StatusCheckSettings(EmbeddedModelObject): strict: bool status_checks: list[str] - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: for key in ["strict", "status_checks"]: value = self.__getattribute__(key) if is_unset(value): @@ -176,7 +176,7 @@ async def get_app_ids(checks) -> dict[str, str]: for check in checks: if ":" in check: - app_slug, context = re.split(":", check, maxsplit=1) + app_slug, _ = re.split(":", check, maxsplit=1) if app_slug != "any" and " " not 
in app_slug: app_slugs.add(app_slug) @@ -232,7 +232,7 @@ class MergeQueueSettings(EmbeddedModelObject): status_check_timeout: int requires_all_group_entries_to_pass_required_checks: bool - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: for key in self.keys(False): value = self.__getattribute__(key) if is_unset(value): @@ -355,10 +355,11 @@ class Ruleset(ModelObject, abc.ABC): _roles: ClassVar[dict[str, str]] = {"5": "RepositoryAdmin", "4": "Write", "2": "Maintain", "1": "OrganizationAdmin"} _inverted_roles: ClassVar[dict[str, str]] = {v: k for k, v in _roles.items()} - def validate(self, context: ValidationContext, parent_object: Any) -> None: - from .github_organization import GitHubOrganization + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: + if TYPE_CHECKING: + from .github_organization import GitHubOrganization - org_settings = cast(GitHubOrganization, context.root_object).settings + org_settings = cast("GitHubOrganization", context.root_object).settings if is_set_and_valid(self.target): if self.target not in {"branch", "tag", "push"}: @@ -434,9 +435,10 @@ def valid_tag_condition_pattern(pattern: str) -> bool: ) if self.requires_deployments is True and len(self.required_deployment_environments) > 0: - from .repository import Repository + if TYPE_CHECKING: + from .repository import Repository - environments = cast(Repository, parent_object).environments + environments = cast("Repository", parent_object).environments environments_by_name = associate_by_key(environments, lambda x: x.name) for env_name in self.required_deployment_environments: @@ -448,10 +450,10 @@ def valid_tag_condition_pattern(pattern: str) -> bool: ) if is_set_and_present(self.required_pull_request): - self.required_pull_request.validate(context, parent_object) + self.required_pull_request.validate(context, 
parent_object, None) if is_set_and_present(self.required_merge_queue): - self.required_merge_queue.validate(context, parent_object) + self.required_merge_queue.validate(context, parent_object, None) def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool: if self.requires_deployments is False: @@ -775,7 +777,7 @@ def to_jsonnet( write_patch_object_as_json(patch, printer, close_object=False) if is_set_and_present(self.required_pull_request): - default_pull_request_config = cast(Ruleset, default_object).required_pull_request + default_pull_request_config = cast("Ruleset", default_object).required_pull_request if default_pull_request_config is None: default_pull_request_config = PullRequestSettings.from_model_data( jsonnet_config.default_pull_request_config @@ -797,7 +799,7 @@ def to_jsonnet( ) if is_set_and_present(self.required_merge_queue): - default_merge_queue_config = cast(Ruleset, default_object).required_merge_queue + default_merge_queue_config = cast("Ruleset", default_object).required_merge_queue if default_merge_queue_config is None: default_merge_queue_config = MergeQueueSettings.from_model_data( jsonnet_config.default_merge_queue_config @@ -819,7 +821,7 @@ def to_jsonnet( ) if is_set_and_present(self.required_status_checks): - default_status_check_config = cast(Ruleset, default_object).required_status_checks + default_status_check_config = cast("Ruleset", default_object).required_status_checks if default_status_check_config is None: default_status_check_config = StatusCheckSettings.from_model_data( jsonnet_config.default_status_checks_config diff --git a/otterdog/models/secret.py b/otterdog/models/secret.py index 16428bfd..f0da802b 100644 --- a/otterdog/models/secret.py +++ b/otterdog/models/secret.py @@ -42,7 +42,7 @@ class Secret(ModelObject, abc.ABC): name: str = dataclasses.field(metadata={"key": True}) value: str - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: 
ValidationContext, parent_object: Any, grandparent_object: Any) -> None: if self.has_dummy_secret(): context.add_failure( FailureType.INFO, f"{self.get_model_header()} only has a dummy value, resource will be skipped." @@ -104,7 +104,7 @@ def resolve_secrets(self, secret_resolver: Callable[[str], str]) -> None: def copy_secrets(self, other_object: ModelObject) -> None: if self.has_dummy_secret(): - self.value = cast(Secret, other_object).value + self.value = cast("Secret", other_object).value def update_dummy_secrets(self, new_value: str) -> None: if self.has_dummy_secret(): @@ -116,17 +116,26 @@ def generate_live_patch( expected_object: ST | None, current_object: ST | None, parent_object: ModelObject | None, + grandparent_object: ModelObject | None, context: LivePatchContext, handler: LivePatchHandler, ) -> None: if current_object is None: expected_object = unwrap(expected_object) - handler(LivePatch.of_addition(expected_object, parent_object, expected_object.apply_live_patch)) + handler( + LivePatch.of_addition( + expected_object, parent_object, grandparent_object, expected_object.apply_live_patch + ) + ) return if expected_object is None: current_object = unwrap(current_object) - handler(LivePatch.of_deletion(current_object, parent_object, current_object.apply_live_patch)) + handler( + LivePatch.of_deletion( + current_object, parent_object, grandparent_object, current_object.apply_live_patch + ) + ) return # if secrets shall be updated and the secret contains a valid secret perform a forced update. 
@@ -140,6 +149,7 @@ def generate_live_patch( current_object, modified_secret, parent_object, + grandparent_object, True, expected_object.apply_live_patch, ) @@ -170,6 +180,7 @@ def has_valid_secret(secret: Secret): current_object, modified_secret, parent_object, + grandparent_object, False, expected_object.apply_live_patch, ) diff --git a/otterdog/models/team.py b/otterdog/models/team.py index 288e0eab..b197a1f9 100644 --- a/otterdog/models/team.py +++ b/otterdog/models/team.py @@ -21,7 +21,7 @@ ModelObject, ValidationContext, ) -from otterdog.utils import UNSET, is_set_and_valid, unwrap +from otterdog.utils import UNSET, is_set_and_valid, is_unset, unwrap if TYPE_CHECKING: from otterdog.jsonnet import JsonnetConfig @@ -43,6 +43,10 @@ class Team(ModelObject, abc.ABC): privacy: str notifications: bool members: list[str] + team_sync_id: str | None + team_sync_name: str | None + team_sync_description: str | None + external_groups: str | None skip_members: bool = dataclasses.field(metadata={"model_only": True}, default=False) skip_non_organization_members: bool = dataclasses.field(metadata={"model_only": True}, default=False) @@ -59,7 +63,7 @@ def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool: return True - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: # execute custom validation rules if present self.execute_custom_validation_if_present(context, "validate-team.py") @@ -98,6 +102,23 @@ def validate(self, context: ValidationContext, parent_object: Any) -> None: f"but 'members' contains user '{member}' who is not an organization member.", ) + values = [ + self.team_sync_id, + self.team_sync_name, + self.team_sync_description, + ] + + all_strings = all(is_set_and_valid(v) for v in values) + all_unset_or_none = all(is_unset(v) or v is None for v in values) + + if not all_strings and not all_unset_or_none: + 
context.add_failure( + FailureType.ERROR, + f"{self.get_model_header(parent_object)} has inconsistent team sync configuration: " + f"all of 'team_sync_id', 'team_sync_name', and 'team_sync_description' must either " + f"all be unset/None or all be valid strings.", + ) + @classmethod def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[str, Any]: mapping = super().get_mapping_from_provider(org_id, data) @@ -113,11 +134,17 @@ def transform_notification_setting(x: str | None): def transform_team_members(member): return member["login"] + def transform_external_groups(value): + if isinstance(value, list) and value: + return value[0]["group_id"] + return None + mapping.update( { "privacy": OptionalS("privacy") >> F(lambda x: "visible" if x == "closed" else x), "notifications": OptionalS("notification_setting") >> F(transform_notification_setting), "members": OptionalS("members", default=[]) >> Forall(transform_team_members), + "external_groups": OptionalS("external_groups") >> F(transform_external_groups), } ) return mapping diff --git a/otterdog/models/team_permission.py b/otterdog/models/team_permission.py new file mode 100644 index 00000000..0e46fdc4 --- /dev/null +++ b/otterdog/models/team_permission.py @@ -0,0 +1,123 @@ +# ******************************************************************************* +# Copyright (c) 2023-2024 Eclipse Foundation and others. 
+# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from __future__ import annotations + +import dataclasses +from typing import TYPE_CHECKING, Any + +from jsonbender import F, OptionalS + +from otterdog.models import ( + FailureType, + LivePatch, + LivePatchType, + ModelObject, + ValidationContext, +) +from otterdog.utils import ( + expect_type, + is_set_and_valid, + unwrap, +) + +if TYPE_CHECKING: + from otterdog.jsonnet import JsonnetConfig + from otterdog.providers.github import GitHubProvider + + +@dataclasses.dataclass +class TeamPermission(ModelObject): + """ + Represents a Team Permission on a Repository. + """ + + name: str = dataclasses.field(metadata={"key": True}) + permission: str + + @property + def model_object_name(self) -> str: + return "team_permission" + + def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: bool) -> str | None: + return f"orgs.{jsonnet_config.create_org_team_permission}" + + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: + if is_set_and_valid(self.permission): + if self.permission not in { + "pull", + "triage", + "push", + "maintain", + "admin", + "READ", + "WRITE", + "MAINTAIN", + "TRIAGE", + "ADMIN", + }: + context.add_failure( + FailureType.ERROR, + f"{self.get_model_header(parent_object)} has 'permission' of value '{self.permission}', " + f"while only values ('read/pull' | 'triage' | 'write/push' | 'maintain' | 'admin') are allowed.", + ) + + @classmethod + def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[str, Any]: + mapping = super().get_mapping_from_provider(org_id, data) + + def transform_permission(x): + to_provider = { + "READ": "pull", + "TRIAGE": "triage", + 
"WRITE": "push", + "MAINTAIN": "maintain", + "ADMIN": "admin", + } + return to_provider[x] + + mapping.update({"permission": OptionalS("permission") >> F(transform_permission)}) + return mapping + + @classmethod + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: + mapping = await super().get_mapping_to_provider(org_id, data, provider) + + return mapping + + @classmethod + async def apply_live_patch(cls, patch: LivePatch[TeamPermission], org_id: str, provider: GitHubProvider) -> None: + from .repository import Repository + + repository = expect_type(patch.parent_object, Repository) + + match patch.patch_type: + case LivePatchType.ADD: + await provider.add_team_permission( + org_id, + repository.name, + unwrap(patch.expected_object).name, + await unwrap(patch.expected_object).to_provider_data(org_id, provider), + ) + + case LivePatchType.REMOVE: + await provider.delete_team_permission( + org_id, + repository.name, + unwrap(patch.current_object).name, + ) + + case LivePatchType.CHANGE: + await provider.update_team_permission( + org_id, + repository.name, + unwrap(patch.current_object).name, + await unwrap(patch.expected_object).to_provider_data(org_id, provider), + ) diff --git a/otterdog/models/variable.py b/otterdog/models/variable.py index cf7f2351..328d2939 100644 --- a/otterdog/models/variable.py +++ b/otterdog/models/variable.py @@ -26,10 +26,10 @@ class Variable(ModelObject, abc.ABC): name: str = dataclasses.field(metadata={"key": True}) value: str - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: if self.name.startswith("GITHUB_"): context.add_failure( FailureType.ERROR, - f"{self.get_model_header(parent_object)} starts with prefix 'GITHUB_', " + f"{self.get_model_header(parent_object, grandparent_object)} starts with prefix 'GITHUB_', " f"which is not 
allowed for variables.", ) diff --git a/otterdog/models/webhook.py b/otterdog/models/webhook.py index 6ec4688a..65b32b4c 100644 --- a/otterdog/models/webhook.py +++ b/otterdog/models/webhook.py @@ -80,7 +80,7 @@ def include_field_for_patch_computation(self, field: dataclasses.Field) -> bool: def include_for_live_patch(self, context: LivePatchContext) -> bool: return not self.has_dummy_secret() - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: if self.has_dummy_secret(): context.add_failure( FailureType.INFO, @@ -150,7 +150,7 @@ def resolve_secrets(self, secret_resolver: Callable[[str], str]) -> None: def copy_secrets(self, other_object: ModelObject) -> None: if self.has_dummy_secret(): - self.secret = cast(Webhook, other_object).secret + self.secret = cast("Webhook", other_object).secret def update_dummy_secrets(self, new_value: str) -> None: if self.has_dummy_secret(): @@ -162,17 +162,26 @@ def generate_live_patch( expected_object: WT | None, current_object: WT | None, parent_object: ModelObject | None, + grandparent_object: ModelObject | None, context: LivePatchContext, handler: LivePatchHandler, ) -> None: if current_object is None: expected_object = unwrap(expected_object) - handler(LivePatch.of_addition(expected_object, parent_object, expected_object.apply_live_patch)) + handler( + LivePatch.of_addition( + expected_object, parent_object, grandparent_object, expected_object.apply_live_patch + ) + ) return if expected_object is None: current_object = unwrap(current_object) - handler(LivePatch.of_deletion(current_object, parent_object, current_object.apply_live_patch)) + handler( + LivePatch.of_deletion( + current_object, parent_object, grandparent_object, current_object.apply_live_patch + ) + ) return # if webhooks shall be updated and the webhook contains a valid secret perform a forced update. 
@@ -190,6 +199,7 @@ def generate_live_patch( current_object, modified_webhook, parent_object, + grandparent_object, True, expected_object.apply_live_patch, ) @@ -229,6 +239,7 @@ def has_valid_secret(webhook: Webhook): current_object, modified_webhook, parent_object, + grandparent_object, False, expected_object.apply_live_patch, ) diff --git a/otterdog/models/workflow_settings.py b/otterdog/models/workflow_settings.py index a15d9347..04d5116e 100644 --- a/otterdog/models/workflow_settings.py +++ b/otterdog/models/workflow_settings.py @@ -73,7 +73,7 @@ def are_actions_more_or_equally_restricted(self, repo_allowed_actions: str) -> b return org_level >= repo_level return False - def validate(self, context: ValidationContext, parent_object: Any) -> None: + def validate(self, context: ValidationContext, parent_object: Any, grandparent_object: Any) -> None: if is_set_and_valid(self.allowed_actions): if self.allowed_actions not in {"all", "local_only", "selected"}: context.add_failure( diff --git a/otterdog/operations/__init__.py b/otterdog/operations/__init__.py index c7d77867..77ed5fd1 100644 --- a/otterdog/operations/__init__.py +++ b/otterdog/operations/__init__.py @@ -319,7 +319,7 @@ def _print_modified_internal( else: self.printer.println( f"{prefix}{key.ljust(max_key_length, ' ')} =" - f' {self._get_value(current_value)} [{color}]->[/] {self._get_value(expected_value)}' + f" {self._get_value(current_value)} [{color}]->[/] {self._get_value(expected_value)}" ) def _print_modified_dict_internal( @@ -340,9 +340,9 @@ def _print_modified_dict_internal( if isinstance(v, list): self._print_modified_internal(k, max_key_length, c_v, v, "[green]+ [/]", "green", forced_update) else: - self.printer.println(f"[green]+ [/]{k.ljust(max_key_length, ' ')} =" f" {self._get_value(v)}") + self.printer.println(f"[green]+ [/]{k.ljust(max_key_length, ' ')} = {self._get_value(v)}") elif v is None: - self.printer.println(f"[red]- [/]{k.ljust(max_key_length, ' ')} =" f" 
{self._get_value(c_v)}") + self.printer.println(f"[red]- [/]{k.ljust(max_key_length, ' ')} = {self._get_value(c_v)}") self._print_modified_internal(k, max_key_length, c_v, v, "[green]+ [/]", "green", forced_update) else: @@ -355,7 +355,7 @@ def _print_modified_dict_internal( if current_value is not None: for k, v in sorted(current_value.items()): if k not in processed_keys: - self.printer.println(f"[red]- [/]{k.ljust(max_key_length, ' ')} =" f" {self._get_value(v)}") + self.printer.println(f"[red]- [/]{k.ljust(max_key_length, ' ')} = {self._get_value(v)}") def _print_modified_list_internal( self, diff --git a/otterdog/operations/apply.py b/otterdog/operations/apply.py index 42b9f5ff..a5739b09 100644 --- a/otterdog/operations/apply.py +++ b/otterdog/operations/apply.py @@ -62,8 +62,9 @@ def handle_add_object( org_id: str, model_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> None: - super().handle_add_object(org_id, model_object, parent_object) + super().handle_add_object(org_id, model_object, parent_object, grandparent_object) self.execute_custom_hook_if_present_with_model_object(self.org_config, model_object, "pre-add-object-hook.py") def handle_delete_object( @@ -71,8 +72,9 @@ def handle_delete_object( org_id: str, model_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> None: - super().handle_delete_object(org_id, model_object, parent_object) + super().handle_delete_object(org_id, model_object, parent_object, grandparent_object) def handle_modified_object( self, @@ -82,6 +84,7 @@ def handle_modified_object( current_object: ModelObject, expected_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> int: modified = super().handle_modified_object( org_id, @@ -90,6 +93,7 @@ def handle_modified_object( current_object, expected_object, parent_object, + grandparent_object, ) 
return modified diff --git a/otterdog/operations/check_status.py b/otterdog/operations/check_status.py index 4ccf3cbe..7352bf65 100644 --- a/otterdog/operations/check_status.py +++ b/otterdog/operations/check_status.py @@ -85,6 +85,7 @@ def handle_add_object( org_id: str, model_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> None: ... def handle_delete_object( @@ -92,6 +93,7 @@ def handle_delete_object( org_id: str, model_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> None: ... def handle_modified_object( @@ -102,6 +104,7 @@ def handle_modified_object( current_object: ModelObject, expected_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> int: settings_to_change = 0 for k, _v in modified_object.items(): diff --git a/otterdog/operations/delete_file.py b/otterdog/operations/delete_file.py index a526e9fa..a35b09de 100644 --- a/otterdog/operations/delete_file.py +++ b/otterdog/operations/delete_file.py @@ -84,9 +84,7 @@ async def execute( if repo is not None and repo.archived is False: collected_error = None repo_name = f"{github_id}/{repo.name}" - self.printer.print( - f"Deleting file '[bold]{self.path}[/]' " f"in repository '[bold]{repo_name}[/]': " - ) + self.printer.print(f"Deleting file '[bold]{self.path}[/]' in repository '[bold]{repo_name}[/]': ") try: deleted_file = await rest_api.content.delete_content( diff --git a/otterdog/operations/diff_operation.py b/otterdog/operations/diff_operation.py index d10bc13e..9f37db71 100644 --- a/otterdog/operations/diff_operation.py +++ b/otterdog/operations/diff_operation.py @@ -183,11 +183,15 @@ def handle(patch: LivePatch) -> None: match patch.patch_type: case LivePatchType.ADD: - self.handle_add_object(github_id, unwrap(patch.expected_object), patch.parent_object) + self.handle_add_object( + github_id, 
unwrap(patch.expected_object), patch.parent_object, patch.grandparent_object + ) diff_status.additions += 1 case LivePatchType.REMOVE: - self.handle_delete_object(github_id, unwrap(patch.current_object), patch.parent_object) + self.handle_delete_object( + github_id, unwrap(patch.current_object), patch.parent_object, patch.grandparent_object + ) diff_status.deletions += 1 case LivePatchType.CHANGE: @@ -265,6 +269,7 @@ def handle_add_object( org_id: str, model_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> None: ... @abstractmethod @@ -273,6 +278,7 @@ def handle_delete_object( org_id: str, model_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> None: ... @abstractmethod @@ -284,6 +290,7 @@ def handle_modified_object( current_object: ModelObject, expected_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> int: ... 
@abstractmethod diff --git a/otterdog/operations/fetch_config.py b/otterdog/operations/fetch_config.py index 04c89b8f..4948c77d 100644 --- a/otterdog/operations/fetch_config.py +++ b/otterdog/operations/fetch_config.py @@ -103,7 +103,7 @@ async def execute( if self.pull_request is not None: self.printer.println( - f"organization definition fetched from pull request " f"#{self.pull_request} to '{org_file_name}'" + f"organization definition fetched from pull request #{self.pull_request} to '{org_file_name}'" ) else: self.printer.println(f"organization definition fetched from default branch to '{org_file_name}'") diff --git a/otterdog/operations/open_pull_request.py b/otterdog/operations/open_pull_request.py index 6a73bf7a..7e1942fb 100644 --- a/otterdog/operations/open_pull_request.py +++ b/otterdog/operations/open_pull_request.py @@ -140,8 +140,7 @@ async def execute( ) except RuntimeError as e: self.printer.print_error( - "failed to open pull request in repo " - f"'{org_config.github_id}/{org_config.config_repo}': {e!s}" + f"failed to open pull request in repo '{org_config.github_id}/{org_config.config_repo}': {e!s}" ) return 1 diff --git a/otterdog/operations/plan.py b/otterdog/operations/plan.py index 7def9baf..3821dc3a 100644 --- a/otterdog/operations/plan.py +++ b/otterdog/operations/plan.py @@ -61,9 +61,10 @@ def handle_add_object( org_id: str, model_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> None: self.printer.println() - model_header = model_object.get_model_header(parent_object) + model_header = model_object.get_model_header(parent_object, grandparent_object) self.print_dict( model_object.to_model_dict(for_diff=True, include_model_only_fields=True, exclude_none_values=True), f"add {model_header}", @@ -76,9 +77,10 @@ def handle_delete_object( org_id: str, model_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> None: 
self.printer.println() - model_header = model_object.get_model_header(parent_object) + model_header = model_object.get_model_header(parent_object, grandparent_object) self.print_dict( model_object.to_model_dict(for_diff=True), f"remove {model_header}", @@ -94,9 +96,10 @@ def handle_modified_object( current_object: ModelObject, expected_object: ModelObject, parent_object: ModelObject | None = None, + grandparent_object: ModelObject | None = None, ) -> int: self.printer.println() - model_header = expected_object.get_model_header(parent_object) + model_header = expected_object.get_model_header(parent_object, grandparent_object) self.print_modified_dict(modified_object, model_header, forced_update) # FIXME: this code should be moved to the Webhook model class. diff --git a/otterdog/operations/push_config.py b/otterdog/operations/push_config.py index 5326c2d8..43053b9d 100644 --- a/otterdog/operations/push_config.py +++ b/otterdog/operations/push_config.py @@ -160,7 +160,7 @@ async def _display_diff(self, org_config: OrganizationConfig, provider: GitHubPr self.printer.println() self.printer.println("No configuration yet available.") self.printer.println( - "Do you want to push these changes? " "(Only 'yes' or 'y' will be accepted as approval)\n" + "Do you want to push these changes? (Only 'yes' or 'y' will be accepted as approval)\n" ) self.printer.print("[bold]Enter a value:[/] ") @@ -194,7 +194,7 @@ async def _display_diff(self, org_config: OrganizationConfig, provider: GitHubPr if not self.force_processing: self.printer.println() self.printer.println( - "Do you want to push these changes? " "(Only 'yes' or 'y' will be accepted as approval)\n" + "Do you want to push these changes? 
(Only 'yes' or 'y' will be accepted as approval)\n" ) self.printer.print("[bold]Enter a value:[/] ") diff --git a/otterdog/providers/github/__init__.py b/otterdog/providers/github/__init__.py index ee2c8555..92e1e18d 100644 --- a/otterdog/providers/github/__init__.py +++ b/otterdog/providers/github/__init__.py @@ -166,6 +166,12 @@ async def get_org_teams(self, org_id: str) -> list[dict[str, Any]]: async def get_org_team_members(self, org_id: str, team_slug: str) -> list[dict[str, Any]]: return await self.rest_api.team.get_team_members(org_id, team_slug) + async def get_org_team_sync_groups(self, org_id: str, team_slug: str) -> list[dict[str, Any]]: + return await self.rest_api.team.get_team_sync_groups(org_id, team_slug) + + async def get_org_team_external_groups(self, org_id: str, team_slug: str) -> list[dict[str, Any]]: + return await self.rest_api.team.get_team_external_groups(org_id, team_slug) + async def add_org_team(self, org_id: str, team_name: str, data: dict[str, str]) -> None: await self.rest_api.team.add_team(org_id, team_name, data) @@ -422,6 +428,53 @@ async def add_repo_variable(self, org_id: str, repo_name: str, data: dict[str, s async def delete_repo_variable(self, org_id: str, repo_name: str, variable_name: str) -> None: await self.rest_api.repo.delete_variable(org_id, repo_name, variable_name) + async def get_env_secrets(self, org_id: str, repo_name: str, env_name: str) -> list[dict[str, Any]]: + return await self.rest_api.env.get_secrets(org_id, repo_name, env_name) + + async def update_env_secret( + self, org_id: str, repo_name: str, env_name: str, secret_name: str, secret: dict[str, Any] + ) -> None: + if len(secret) > 0: + await self.rest_api.env.update_secret(org_id, repo_name, env_name, secret_name, secret) + + async def add_env_secret(self, org_id: str, repo_name: str, env_name: str, data: dict[str, str]) -> None: + await self.rest_api.env.add_secret(org_id, repo_name, env_name, data) + + async def delete_env_secret(self, org_id: str, 
repo_name: str, env_name: str, secret_name: str) -> None: + await self.rest_api.env.delete_secret(org_id, repo_name, env_name, secret_name) + + async def get_env_variables(self, org_id: str, repo_name: str, env_name: str) -> list[dict[str, Any]]: + return await self.rest_api.env.get_variables(org_id, repo_name, env_name) + + async def update_env_variable( + self, org_id: str, repo_name: str, env_name: str, variable_name: str, variable: dict[str, Any] + ) -> None: + if len(variable) > 0: + await self.rest_api.env.update_variable(org_id, repo_name, env_name, variable_name, variable) + + async def add_env_variable(self, org_id: str, repo_name: str, env_name: str, data: dict[str, str]) -> None: + await self.rest_api.env.add_variable(org_id, repo_name, env_name, data) + + async def delete_env_variable(self, org_id: str, repo_name: str, env_name: str, variable_name: str) -> None: + await self.rest_api.env.delete_variable(org_id, repo_name, env_name, variable_name) + + async def get_team_permissions(self, org_id: str) -> list[dict[str, Any]]: + return await self.graphql_client.get_team_permissions(org_id) + + async def update_team_permission( + self, org_id: str, repo_name: str, team_name: str, team_permission: dict[str, str] + ) -> None: + if len(team_permission) > 0: + await self.rest_api.repo.update_team_permission(org_id, repo_name, team_name, team_permission) + + async def add_team_permission( + self, org_id: str, repo_name: str, team_name: str, team_permission: dict[str, str] + ) -> None: + await self.rest_api.repo.add_team_permission(org_id, repo_name, team_name, team_permission) + + async def delete_team_permission(self, org_id: str, repo_name: str, team_name: str) -> None: + await self.rest_api.repo.delete_team_permission(org_id, repo_name, team_name) + async def dispatch_workflow(self, org_id: str, repo_name: str, workflow_name: str) -> bool: return await self.rest_api.repo.dispatch_workflow(org_id, repo_name, workflow_name) diff --git 
a/otterdog/providers/github/exception.py b/otterdog/providers/github/exception.py index e4a3042b..9ed260a5 100644 --- a/otterdog/providers/github/exception.py +++ b/otterdog/providers/github/exception.py @@ -70,4 +70,4 @@ def missing_scopes(self) -> list[str]: return self.__missing_scopes def __str__(self): - return f"Insufficient permissions while accessing '{self.url}': " f"(missing scopes={self.missing_scopes})" + return f"Insufficient permissions while accessing '{self.url}': (missing scopes={self.missing_scopes})" diff --git a/otterdog/providers/github/graphql.py b/otterdog/providers/github/graphql.py index 529f17bf..ebd55ea0 100644 --- a/otterdog/providers/github/graphql.py +++ b/otterdog/providers/github/graphql.py @@ -178,9 +178,7 @@ async def add_branch_protection_rule( self, org_id: str, repo_name: str, repo_node_id: str, data: dict[str, Any] ) -> None: rule_pattern = data["pattern"] - _logger.debug( - f"creating branch_protection_rule with pattern '{rule_pattern}' " f"for repo '{org_id}/{repo_name}'" - ) + _logger.debug(f"creating branch_protection_rule with pattern '{rule_pattern}' for repo '{org_id}/{repo_name}'") data["repositoryId"] = repo_node_id variables = {"ruleInput": data} @@ -257,11 +255,46 @@ async def get_team_membership(self, org_id: str, user_login: str) -> list[dict[s variables = {"owner": org_id, "user": user_login} return await self._run_paged_query(variables, "get-team-membership.gql", "data.organization.teams") + async def get_team_permissions(self, org_id: str) -> list[dict[str, Any]]: + _logger.debug(f"retrieving team permissions in org '{org_id}'") + + variables = {"org": org_id} + # Run the graphql query with a limit of 100 for teams and repositories. If there are more than + # 100 teams available this gets handled in _run_paged_query. If there are more than 100 + # permissions to repositories available then these are handled in the subsequent loop, where the + # pageInfo is used to get the missing repository entries. 
+ teams = await self._run_paged_query( + input_variables=variables, + query_file="get-team-permissions-repositories.gql", + prefix_selector="data.organization.teams", + ) + for team in teams: + repos = team["repositories"]["edges"] + page_info = team["repositories"]["pageInfo"] + if not page_info["hasNextPage"]: + continue + repo_cursor = page_info["endCursor"] + sub_vars = { + "org": org_id, + "teamSlug": team["slug"], + "endCursor": repo_cursor, + } + sub_result = await self._run_paged_query( + input_variables=sub_vars, + query_file="get-repository-permissions-of-team.gql", + prefix_selector="data.organization.team.repositories", + selector_type=".edges", + ) + repos.extend(sub_result) + + return teams + async def _run_paged_query( self, input_variables: dict[str, Any], query_file: str, prefix_selector: str = "data.repository.branchProtectionRules", + selector_type: str = ".nodes", ) -> list[dict[str, Any]]: _logger.debug(f"running graphql query '{query_file}' with input '{json.dumps(input_variables)}'") @@ -281,8 +314,8 @@ async def _run_paged_query( if is_trace_enabled(): _logger.trace("graphql result = %s", json.dumps(json_data, indent=2)) - if "data" in json_data: - rules_result = query_json(prefix_selector + ".nodes", json_data) + if status < 400 and "data" in json_data: + rules_result = query_json(prefix_selector + selector_type, json_data) for rule in rules_result: result.append(rule) diff --git a/otterdog/providers/github/rest/__init__.py b/otterdog/providers/github/rest/__init__.py index 2bddd162..31c2a6d9 100644 --- a/otterdog/providers/github/rest/__init__.py +++ b/otterdog/providers/github/rest/__init__.py @@ -136,6 +136,12 @@ def meta(self): return MetaClient(self) + @cached_property + def env(self): + from .env_client import EnvClient + + return EnvClient(self) + class RestClient: def __init__(self, rest_api: RestApi): diff --git a/otterdog/providers/github/rest/env_client.py b/otterdog/providers/github/rest/env_client.py new file mode 100644 index 
00000000..927351f5 --- /dev/null +++ b/otterdog/providers/github/rest/env_client.py @@ -0,0 +1,169 @@ +# ******************************************************************************* +# Copyright (c) 2024-2025 Eclipse Foundation and others. +# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +import json +from typing import Any + +from otterdog.logging import get_logger +from otterdog.providers.github.exception import GitHubException +from otterdog.providers.github.rest import RestApi, RestClient, encrypt_value + +_logger = get_logger(__name__) + + +class EnvClient(RestClient): + def __init__(self, rest_api: RestApi): + super().__init__(rest_api) + + async def get_secrets(self, org_id: str, repo_name: str, env_name: str) -> list[dict[str, Any]]: + _logger.debug("retrieving secrets for repo env '%s/%s:%s'", org_id, repo_name, env_name) + + try: + status, body = await self.requester.request_raw( + "GET", f"/repos/{org_id}/{repo_name}/environments/{env_name}/secrets" + ) + if status == 200: + return json.loads(body)["secrets"] + else: + return [] + except GitHubException as ex: + raise RuntimeError( + f"failed retrieving secrets for repo env '{org_id}/{repo_name}:{env_name}':\n{ex}" + ) from ex + + async def update_secret( + self, org_id: str, repo_name: str, env_name: str, secret_name: str, secret: dict[str, Any] + ) -> None: + _logger.debug("updating repo env secret '%s' for repo env '%s/%s:%s'", secret_name, org_id, repo_name, env_name) + + if "name" in secret: + secret.pop("name") + + await self._encrypt_secret_inplace(org_id, repo_name, env_name, secret) + + status, _ = await self.requester.request_raw( + "PUT", + f"/repos/{org_id}/{repo_name}/environments/{env_name}/secrets/{secret_name}", + 
json.dumps(secret), + ) + + if status != 204: + raise RuntimeError(f"failed to update repo env secret '{secret_name}'") + + _logger.debug("updated repo env secret '%s'", secret_name) + + async def add_secret(self, org_id: str, repo_name: str, env_name: str, data: dict[str, str]) -> None: + secret_name = data.pop("name") + _logger.debug("adding repo env secret '%s' for repo env '%s/%s:%s'", secret_name, org_id, repo_name, env_name) + + await self._encrypt_secret_inplace(org_id, repo_name, env_name, data) + + status, _ = await self.requester.request_raw( + "PUT", + f"/repos/{org_id}/{repo_name}/environments/{env_name}/secrets/{secret_name}", + json.dumps(data), + ) + + if status != 201: + raise RuntimeError(f"failed to add repo env secret '{secret_name}'") + + _logger.debug("added repo env secret '%s'", secret_name) + + async def _encrypt_secret_inplace(self, org_id: str, repo_name: str, env_name: str, data: dict[str, Any]) -> None: + value = data.pop("value") + key_id, public_key = await self.get_public_key(org_id, repo_name, env_name) + data["encrypted_value"] = encrypt_value(public_key, value) + data["key_id"] = key_id + + async def delete_secret(self, org_id: str, repo_name: str, env_name: str, secret_name: str) -> None: + _logger.debug("deleting repo env secret '%s' for repo env '%s/%s:%s'", secret_name, org_id, repo_name, env_name) + + status, _ = await self.requester.request_raw( + "DELETE", f"/repos/{org_id}/{repo_name}/environments/{env_name}/secrets/{secret_name}" + ) + + if status != 204: + raise RuntimeError(f"failed to delete repo env secret '{secret_name}'") + + _logger.debug("removed repo env secret '%s'", secret_name) + + async def get_public_key(self, org_id: str, repo_name: str, env_name: str) -> tuple[str, str]: + _logger.debug("retrieving repo public key for repo env '%s/%s:%s'", org_id, repo_name, env_name) + + try: + response = await self.requester.request_json( + "GET", f"/repos/{org_id}/{repo_name}/environments/{env_name}/secrets/public-key" + 
) + return response["key_id"], response["key"] + except GitHubException as ex: + raise RuntimeError(f"failed retrieving repo env public key:\n{ex}") from ex + + async def get_variables(self, org_id: str, repo_name: str, env_name: str) -> list[dict[str, Any]]: + _logger.debug("retrieving variables for repo env '%s/%s:%s'", org_id, repo_name, env_name) + + try: + status, body = await self.requester.request_raw( + "GET", f"/repos/{org_id}/{repo_name}/environments/{env_name}/variables" + ) + if status == 200: + return json.loads(body)["variables"] + else: + return [] + except GitHubException as ex: + raise RuntimeError( + f"failed retrieving variables for repo env'{org_id}/{repo_name}:{env_name}':\n{ex}" + ) from ex + + async def update_variable( + self, org_id: str, repo_name: str, env_name: str, variable_name: str, variable: dict[str, Any] + ) -> None: + _logger.debug("updating repo env variable '%s' for repo '%s/%s:%s'", variable_name, org_id, repo_name, env_name) + + if "name" in variable: + variable.pop("name") + + status, body = await self.requester.request_raw( + "PATCH", + f"/repos/{org_id}/{repo_name}/environments/{env_name}/variables/{variable_name}", + json.dumps(variable), + ) + if status != 204: + raise RuntimeError(f"failed to update repo env variable '{variable_name}': {body}") + + _logger.debug("updated repo env variable '%s'", variable_name) + + async def add_variable(self, org_id: str, repo_name: str, env_name: str, data: dict[str, str]) -> None: + variable_name = data.get("name") + _logger.debug( + "adding repo env variable '%s' for repo env '%s/%s:%s'", variable_name, org_id, repo_name, env_name + ) + + status, body = await self.requester.request_raw( + "POST", + f"/repos/{org_id}/{repo_name}/environments/{env_name}/variables", + json.dumps(data), + ) + + if status != 201: + raise RuntimeError(f"failed to add repo env variable '{variable_name}': {body}") + + _logger.debug("added repo env variable '%s'", variable_name) + + async def 
delete_variable(self, org_id: str, repo_name: str, env_name: str, variable_name: str) -> None: + _logger.debug( + "deleting repo env variable '%s' for repo env '%s/%s:%s'", variable_name, org_id, repo_name, env_name + ) + + status, _ = await self.requester.request_raw( + "DELETE", f"/repos/{org_id}/{repo_name}/environments/{env_name}/variables/{variable_name}" + ) + + if status != 204: + raise RuntimeError(f"failed to delete repo env variable '{variable_name}'") + + _logger.debug("removed repo env variable '%s'", variable_name) diff --git a/otterdog/providers/github/rest/org_client.py b/otterdog/providers/github/rest/org_client.py index 0d39598f..914d6a4d 100644 --- a/otterdog/providers/github/rest/org_client.py +++ b/otterdog/providers/github/rest/org_client.py @@ -95,7 +95,7 @@ async def list_security_managers(self, org_id: str, role_id: str) -> list[str]: result = await self.requester.request_json("GET", f"/orgs/{org_id}/organization-roles/{role_id}/teams") return [x["slug"] for x in result] except GitHubException as ex: - raise RuntimeError(f"failed retrieving security managers for org " f"'{org_id}':\n{ex}") from ex + raise RuntimeError(f"failed retrieving security managers for org '{org_id}':\n{ex}") from ex async def update_security_managers(self, org_id: str, security_managers: list[str]) -> None: _logger.debug("updating security managers for org '%s'", org_id) @@ -142,7 +142,7 @@ async def assign_role_to_team(self, org_id: str, role_id: str, team_slug: str) - ) else: raise RuntimeError( - f"failed assigning role '{role_id}' to team '{team_slug}' in org '{org_id}'" f"\n{status}: {body}" + f"failed assigning role '{role_id}' to team '{team_slug}' in org '{org_id}'\n{status}: {body}" ) async def remove_role_from_team(self, org_id: str, role_id: str, team_slug: str) -> None: @@ -153,7 +153,7 @@ async def remove_role_from_team(self, org_id: str, role_id: str, team_slug: str) ) if status != 204: raise RuntimeError( - f"failed removing role '{role_id}' from team 
'{team_slug}' in org '{org_id}'" f"\n{status}: {body}" + f"failed removing role '{role_id}' from team '{team_slug}' in org '{org_id}'\n{status}: {body}" ) _logger.debug("removed role '%s' from team '%s' in org '%s'", role_id, team_slug, org_id) @@ -527,7 +527,7 @@ async def update_workflow_settings(self, org_id: str, data: dict[str, Any]) -> N ) if status != 204: - raise RuntimeError(f"failed to update workflow settings for org '{org_id}'" f"\n{status}: {body}") + raise RuntimeError(f"failed to update workflow settings for org '{org_id}'\n{status}: {body}") _logger.debug("updated workflow settings for org '%s'", org_id) @@ -569,7 +569,7 @@ async def _update_selected_repositories_for_workflow_settings( if status != 204: raise RuntimeError( - f"failed updating selected repositories for workflow settings of org '{org_id}'" f"\n{status}: {body}" + f"failed updating selected repositories for workflow settings of org '{org_id}'\n{status}: {body}" ) _logger.debug("updated selected repositories for workflow settings of org '%s'", org_id) @@ -590,7 +590,7 @@ async def _update_selected_actions_for_workflow_settings(self, org_id: str, data ) if status != 204: - raise RuntimeError(f"failed updating allowed actions for org '{org_id}'" f"\n{status}: {body}") + raise RuntimeError(f"failed updating allowed actions for org '{org_id}'\n{status}: {body}") _logger.debug("updated allowed actions for org '%s'", org_id) @@ -610,7 +610,7 @@ async def _update_default_workflow_permissions(self, org_id: str, data: dict[str ) if status != 204: - raise RuntimeError(f"failed updating default workflow permissions for org '{org_id}'" f"\n{status}: {body}") + raise RuntimeError(f"failed updating default workflow permissions for org '{org_id}'\n{status}: {body}") _logger.debug("updated default workflow permissions for org '%s'", org_id) diff --git a/otterdog/providers/github/rest/reference_client.py b/otterdog/providers/github/rest/reference_client.py index 6a27340f..6ab9033c 100644 --- 
a/otterdog/providers/github/rest/reference_client.py +++ b/otterdog/providers/github/rest/reference_client.py @@ -63,6 +63,4 @@ async def delete_reference(self, org_id: str, repo_name: str, ref: str) -> bool: elif status in (409, 422): return False else: - raise RuntimeError( - f"failed deleting reference '{ref}' in repo '{org_id}/{repo_name}'" f"\n{status}: {body}" - ) + raise RuntimeError(f"failed deleting reference '{ref}' in repo '{org_id}/{repo_name}'\n{status}: {body}") diff --git a/otterdog/providers/github/rest/repo_client.py b/otterdog/providers/github/rest/repo_client.py index b560741f..7065223d 100644 --- a/otterdog/providers/github/rest/repo_client.py +++ b/otterdog/providers/github/rest/repo_client.py @@ -741,6 +741,47 @@ async def delete_environment(self, org_id: str, repo_name: str, env_name: str) - _logger.debug("removed repo environment '%s'", env_name) + async def get_team_permissions(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + _logger.debug("retrieving teams with permissions for repo '%s/%s'", org_id, repo_name) + + try: + return await self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/teams") + except GitHubException as ex: + raise RuntimeError(f"failed getting team permissions for repo '{org_id}/{repo_name}':\n{ex}") from ex + + async def update_team_permission( + self, org_id: str, repo_name: str, team_name: str, team_permission: dict[str, Any] + ) -> None: + if "name" in team_permission: + team_permission.pop("name") + + status, _ = await self.requester.request_raw( + "PUT", f"/orgs/{org_id}/teams/{team_name}/repos/{org_id}/{repo_name}", data=json.dumps(team_permission) + ) + + if status != 204: + raise RuntimeError(f"failed to update team permission for team {team_name} on repo {repo_name}") + + _logger.debug(f"updated team permission for team {team_name} on repo {repo_name}") + + async def add_team_permission( + self, org_id: str, repo_name: str, team_name: str, team_permission: dict[str, Any] + ) -> None: + 
_logger.debug(f"adding team permission for team {team_name} on repo {repo_name}") + await self.update_team_permission(org_id, repo_name, team_name, team_permission) + _logger.debug(f"added team permission for team {team_name} on repo {repo_name}") + + async def delete_team_permission(self, org_id: str, repo_name: str, team_name: str) -> None: + _logger.debug(f"deleting team permission for team {team_name} on repo {repo_name}") + status, _ = await self.requester.request_raw( + "DELETE", f"/orgs/{org_id}/teams/{team_name}/repos/{org_id}/{repo_name}" + ) + + if status != 204: + raise RuntimeError(f"failed to delete team permission for team {team_name} on repo {repo_name}") + + _logger.debug(f"removed team permission for team {team_name} on repo {repo_name}") + async def _get_deployment_branch_policies(self, org_id: str, repo_name: str, env_name: str) -> list[dict[str, Any]]: _logger.debug("retrieving deployment branch policies for env '%s'", env_name) diff --git a/otterdog/providers/github/rest/team_client.py b/otterdog/providers/github/rest/team_client.py index fdd4d728..fcfefe3e 100644 --- a/otterdog/providers/github/rest/team_client.py +++ b/otterdog/providers/github/rest/team_client.py @@ -82,6 +82,15 @@ async def add_team(self, org_id: str, team_name: str, data: dict[str, str]) -> s members = data["members"] for user in members: await self.add_member_to_team(org_id, team_slug, user) + if all(k in data and data[k] is not None for k in ("team_sync_id", "team_sync_name", "team_sync_description")): + tsdata = { + "group_id": data["team_sync_id"], + "group_name": data["team_sync_name"], + "group_description": data["team_sync_description"], + } + await self.update_team_sync_groups(org_id, team_slug, tsdata) + if "external_groups" in data and data["external_groups"] is not None: + await self.update_team_external_groups(org_id, team_slug, data["external_groups"]) _logger.debug("added team '%s'", team_name) return team_slug @@ -94,6 +103,19 @@ async def update_team(self, 
org_id: str, team_slug: str, team: dict[str, Any]) - if "members" in team: await self.update_team_members(org_id, team_slug, team["members"]) + if all( + k in team and team[k] is not None for k in ("team_sync_id", "team_sync_name", "team_sync_description") + ): + tsdata = { + "group_id": team["team_sync_id"], + "group_name": team["team_sync_name"], + "group_description": team["team_sync_description"], + } + await self.update_team_sync_groups(org_id, team_slug, tsdata) + elif all(k in team for k in ("team_sync_id", "team_sync_name", "team_sync_description")): + await self.update_team_sync_groups(org_id, team_slug, None) + if "external_groups" in team: + await self.update_team_external_groups(org_id, team_slug, team["external_groups"]) _logger.debug("updated team '%s'", team_slug) except GitHubException as ex: @@ -180,3 +202,73 @@ async def get_membership(self, org_id: str, user_name: str) -> dict[str, Any]: return await self.requester.request_json("GET", f"/orgs/{org_id}/memberships/{user_name}") except GitHubException as ex: raise RuntimeError(f"failed retrieving membership for user '{user_name}' in org '{org_id}':\n{ex}") from ex + + async def get_team_sync_groups(self, org_id: str, team_slug: str) -> list[dict[str, Any]]: + _logger.debug("retrieving team sync groups for team '%s/%s'", org_id, team_slug) + + try: + response = await self.requester.request_json( + "GET", f"/orgs/{org_id}/teams/{team_slug}/team-sync/group-mappings" + ) + except GitHubException as ex: + # Only suppress 403/404 (endpoint not available / team sync not enabled) + if ex.status in (403, 404): + _logger.debug("team sync endpoint not available for team '%s/%s' (403/404)", org_id, team_slug) + return [] + # All other errors must be raised + raise RuntimeError(f"failed retrieving team sync groups for team '{org_id}/{team_slug}':\n{ex}") from ex + + return response.get("groups", []) + + async def update_team_sync_groups(self, org_id: str, team_slug: str, group: dict[str, str] | None) -> None: + _logger.debug("updating
sync_groups for team '%s' in org '%s'", team_slug, org_id) + data = {"groups": []} if group is None else {"groups": [group]} + status, body = await self.requester.request_raw( + "PATCH", f"/orgs/{org_id}/teams/{team_slug}/team-sync/group-mappings", data=json.dumps(data) + ) + + if status == 200: + _logger.debug("updated team-sync '%s' of team '%s' for org '%s'", group, team_slug, org_id) + else: + raise RuntimeError( + f"failed updating team-sync '{group}' to team '{team_slug}' in org '{org_id}'\n{status}: {body}" + ) + + async def get_team_external_groups(self, org_id: str, team_slug: str) -> list[dict[str, Any]]: + _logger.debug("retrieving external groups for team '%s/%s'", org_id, team_slug) + + try: + response = await self.requester.request_json("GET", f"/orgs/{org_id}/teams/{team_slug}/external-groups") + except GitHubException as ex: + # Only suppress 400/404 (endpoint not available) + if ex.status in (400, 404): + _logger.debug("external groups endpoint not available for team '%s/%s' (400/404)", org_id, team_slug) + return [] + raise RuntimeError(f"failed retrieving external groups for team '{org_id}/{team_slug}':\n{ex}") from ex + + return response + + async def update_team_external_groups(self, org_id: str, team_slug: str, group: str | None) -> None: + _logger.debug("updating external_groups for team '%s' in org '%s'", team_slug, org_id) + if group is None: + status, body = await self.requester.request_raw( + "DELETE", f"/orgs/{org_id}/teams/{team_slug}/external-groups" + ) + if status != 204: + raise RuntimeError( + f"failed updating external groups from team '{team_slug}' in org '{org_id}'\n{status}: {body}" + ) + + _logger.debug("updated external groups from team '%s' in org '%s'", team_slug, org_id) + else: + data = {"group_id": f"{group}"} + status, body = await self.requester.request_raw( + "PATCH", f"/orgs/{org_id}/teams/{team_slug}/external-groups", data=json.dumps(data) + ) + + if status == 200: + _logger.debug("updated external groups '%s' of team '%s'
for org '%s'", group, team_slug, org_id) + else: + raise RuntimeError( + f"failed updating external groups '{group}' to team '{team_slug}' in org '{org_id}'\n{status}: {body}" + ) diff --git a/otterdog/resources/graphql/get-repository-permissions-of-team.gql b/otterdog/resources/graphql/get-repository-permissions-of-team.gql new file mode 100644 index 00000000..f4a12412 --- /dev/null +++ b/otterdog/resources/graphql/get-repository-permissions-of-team.gql @@ -0,0 +1,23 @@ +query TeamRepositoriesQuery( + $org: String!, + $teamSlug: String!, + $endCursor: String +) { + organization(login: $org) { + team(slug: $teamSlug) { + repositories(first: 100, after: $endCursor) { + edges { + permission + node { + name + id + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } +} diff --git a/otterdog/resources/graphql/get-team-permissions-repositories.gql b/otterdog/resources/graphql/get-team-permissions-repositories.gql new file mode 100644 index 00000000..64bbfde3 --- /dev/null +++ b/otterdog/resources/graphql/get-team-permissions-repositories.gql @@ -0,0 +1,28 @@ +query ($org: String!, $endCursor: String) { + organization(login: $org) { + teams(first: 100, after: $endCursor) { + nodes { + id + slug + name + repositories(first: 100) { + edges { + permission + node { + name + id + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + pageInfo { + hasNextPage + endCursor + } + } + } +} diff --git a/otterdog/resources/schemas/env-secret.json b/otterdog/resources/schemas/env-secret.json new file mode 100644 index 00000000..d9c1d3ce --- /dev/null +++ b/otterdog/resources/schemas/env-secret.json @@ -0,0 +1,8 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + + "$ref": "secret.json", + "type": "object", + "required": [ "name", "value" ], + "unevaluatedProperties": false +} diff --git a/otterdog/resources/schemas/env-variable.json b/otterdog/resources/schemas/env-variable.json new file mode 100644 index 00000000..7dcc582f --- /dev/null +++ 
b/otterdog/resources/schemas/env-variable.json @@ -0,0 +1,8 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + + "$ref": "variable.json", + "type": "object", + "required": [ "name", "value" ], + "unevaluatedProperties": false +} diff --git a/otterdog/resources/schemas/environment.json b/otterdog/resources/schemas/environment.json index ba1bea7a..8ce58941 100644 --- a/otterdog/resources/schemas/environment.json +++ b/otterdog/resources/schemas/environment.json @@ -13,6 +13,14 @@ "branch_policies": { "type": "array", "items": { "type": "string" } + }, + "secrets": { + "type": "array", + "items": { "$ref": "env-secret.json" } + }, + "variables": { + "type": "array", + "items": { "$ref": "env-variable.json" } } }, diff --git a/otterdog/resources/schemas/repository.json b/otterdog/resources/schemas/repository.json index e017e17c..19a8f879 100644 --- a/otterdog/resources/schemas/repository.json +++ b/otterdog/resources/schemas/repository.json @@ -85,6 +85,10 @@ "environments": { "type": "array", "items": { "$ref": "environment.json" } + }, + "team_permissions": { + "type": "array", + "items": { "$ref": "team-permission.json" } } }, diff --git a/otterdog/resources/schemas/team-permission.json b/otterdog/resources/schemas/team-permission.json new file mode 100644 index 00000000..80bec638 --- /dev/null +++ b/otterdog/resources/schemas/team-permission.json @@ -0,0 +1,11 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + + "type": "object", + "properties": { + "name": { "type": "string" }, + "permission": { "type": "string" } + }, + "required": ["name", "permission"], + "additionalProperties": false +} diff --git a/otterdog/resources/schemas/team.json b/otterdog/resources/schemas/team.json index c247d5f9..04221d5c 100644 --- a/otterdog/resources/schemas/team.json +++ b/otterdog/resources/schemas/team.json @@ -1,20 +1,25 @@ { "$schema": "https://json-schema.org/draft/2020-12/schema", - "type": "object", + "properties": { "name": { "type": 
"string" }, - "description": { "$ref": "types.json#/$defs/string-or-null"}, + "description": { "$ref": "types.json#/$defs/string-or-null" }, + "privacy": { "type": "string" }, + "notifications": { "type": "boolean" }, + "skip_members": { "type": "boolean" }, + "skip_non_organization_members": { "type": "boolean" }, + "members": { "type": "array", "items": { "type": "string" } }, - "privacy": { "type": "string" }, - "notifications": { "type": "boolean" }, - "skip_members": { "type": "boolean" }, - "skip_non_organization_members": { "type": "boolean" } - }, - "required": [ "name", "privacy" ], + "team_sync_id": { "$ref": "types.json#/$defs/string-or-null" }, + "team_sync_name": { "$ref": "types.json#/$defs/string-or-null" }, + "team_sync_description": { "$ref": "types.json#/$defs/string-or-null" }, + "external_groups": { "$ref": "types.json#/$defs/string-or-null" } + }, + "required": ["name", "privacy"], "additionalProperties": false } diff --git a/otterdog/utils.py b/otterdog/utils.py index f6d9023e..f4484850 100644 --- a/otterdog/utils.py +++ b/otterdog/utils.py @@ -111,7 +111,7 @@ def is_different_ignoring_order(value: Any, other_value: Any) -> bool: elif isinstance(value, dict) and isinstance(other_value, dict): for key, item_value in value.items(): other_item_value = other_value.get(key, None) - if isinstance(item_value, list): + if isinstance(item_value, list) and isinstance(other_item_value, list): if sorted(item_value) != sorted(other_item_value): return True else: @@ -476,7 +476,7 @@ def sort_jsonnet(lines: list[str]) -> list[str]: def _sort_node(node): - line, context = node + _, context = node if context is not None: last = context.pop() diff --git a/otterdog/webapp/blueprints/__init__.py b/otterdog/webapp/blueprints/__init__.py index 19525484..ba4a7173 100644 --- a/otterdog/webapp/blueprints/__init__.py +++ b/otterdog/webapp/blueprints/__init__.py @@ -10,7 +10,7 @@ import re from abc import ABC, abstractmethod -from enum import Enum +from enum import 
StrEnum from functools import cached_property from logging import Logger, getLogger from typing import TYPE_CHECKING, Any @@ -27,7 +27,7 @@ BLUEPRINT_PATH = "otterdog/blueprints" -class BlueprintType(str, Enum): +class BlueprintType(StrEnum): REQUIRED_FILE = "required_file" PIN_WORKFLOW = "pin_workflow" APPEND_CONFIGURATION = "append_configuration" diff --git a/otterdog/webapp/db/models.py b/otterdog/webapp/db/models.py index d1a64045..61d62929 100644 --- a/otterdog/webapp/db/models.py +++ b/otterdog/webapp/db/models.py @@ -9,15 +9,15 @@ from __future__ import annotations from datetime import datetime -from enum import Enum -from typing import Any, Optional +from enum import StrEnum +from typing import Any from odmantic import EmbeddedModel, Field, Model from otterdog.webapp.utils import current_utc_time -class InstallationStatus(str, Enum): +class InstallationStatus(StrEnum): INSTALLED = "installed" NOT_INSTALLED = "not_installed" SUSPENDED = "suspended" @@ -28,14 +28,14 @@ def __str__(self) -> str: class InstallationModel(Model): github_id: str = Field(primary_field=True) - project_name: Optional[str] = Field(unique=True, index=True) + project_name: str | None = Field(unique=True, index=True) installation_id: int = Field(index=True, default=0) installation_status: InstallationStatus - config_repo: Optional[str] = None - base_template: Optional[str] = None + config_repo: str | None = None + base_template: str | None = None -class TaskStatus(str, Enum): +class TaskStatus(StrEnum): CREATED = "created" SCHEDULED = "scheduled" FINISHED = "finished" @@ -51,7 +51,7 @@ class TaskModel(Model): repo_name: str = Field(index=True) pull_request: int = 0 status: TaskStatus = TaskStatus.CREATED - log: Optional[str] = None + log: str | None = None cache_stats: str = "" rate_limit_remaining: str = "" created_at: datetime = Field(index=True, default_factory=current_utc_time) @@ -60,12 +60,12 @@ class TaskModel(Model): class ConfigurationModel(Model): github_id: str = 
Field(primary_field=True) - project_name: Optional[str] = Field(unique=True, index=True) + project_name: str | None = Field(unique=True, index=True) config: dict sha: str -class PullRequestStatus(str, Enum): +class PullRequestStatus(StrEnum): OPEN = "open" CLOSED = "closed" MERGED = "merged" @@ -74,7 +74,7 @@ def __str__(self) -> str: return self.name -class ApplyStatus(str, Enum): +class ApplyStatus(StrEnum): NOT_APPLIED = "not_applied" FAILED = "failed" PARTIALLY_APPLIED = "partially_applied" @@ -96,18 +96,18 @@ class PullRequestModel(Model): status: PullRequestStatus = Field(index=True) apply_status: ApplyStatus = Field(index=True, default=ApplyStatus.NOT_APPLIED) - valid: Optional[bool] = None - in_sync: Optional[bool] = None - requires_manual_apply: Optional[bool] = None - supports_auto_merge: Optional[bool] = None - author_can_auto_merge: Optional[bool] = None - has_required_approvals: Optional[bool] = None + valid: bool | None = None + in_sync: bool | None = None + requires_manual_apply: bool | None = None + supports_auto_merge: bool | None = None + author_can_auto_merge: bool | None = None + has_required_approvals: bool | None = None created_at: datetime = Field(index=True) updated_at: datetime = Field(index=True) - closed_at: Optional[datetime] = None - merged_at: Optional[datetime] = Field(index=True, default=None) + closed_at: datetime | None = None + merged_at: datetime | None = Field(index=True, default=None) def can_be_automerged(self) -> bool: return ( @@ -138,8 +138,8 @@ class StatisticsModel(Model): class UserModel(Model): node_id: str = Field(primary_field=True) username: str - email: Optional[str] = None - full_name: Optional[str] = None + email: str | None = None + full_name: str | None = None projects: list[str] = Field(default_factory=list) @@ -151,8 +151,8 @@ class PolicyId(EmbeddedModel): class PolicyModel(Model): id: PolicyId = Field(primary_field=True) path: str - name: Optional[str] = None - description: Optional[str] = None + name: str | 
None = None + description: str | None = None config: dict @@ -170,10 +170,10 @@ class BlueprintId(EmbeddedModel): class BlueprintModel(Model): id: BlueprintId = Field(primary_field=True) path: str - name: Optional[str] = None - description: Optional[str] = None + name: str | None = None + description: str | None = None recheck_needed: bool = True - last_checked: Optional[datetime] = Field(index=True, default=None) + last_checked: datetime | None = Field(index=True, default=None) config: dict @@ -183,7 +183,7 @@ class BlueprintStatusId(EmbeddedModel): blueprint_id: str -class BlueprintStatus(str, Enum): +class BlueprintStatus(StrEnum): NOT_CHECKED = "not_checked" SUCCESS = "success" FAILURE = "failure" @@ -200,7 +200,7 @@ class BlueprintStatusModel(Model): updated_at: datetime = Field(index=True, default_factory=current_utc_time) status: BlueprintStatus = Field(default=BlueprintStatus.NOT_CHECKED) - remediation_pr: Optional[int] = Field(index=True, default=None) + remediation_pr: int | None = Field(index=True, default=None) class ScorecardId(EmbeddedModel): @@ -212,6 +212,6 @@ class ScorecardResultModel(Model): id: ScorecardId = Field(primary_field=True) updated_at: datetime = Field(index=True, default_factory=current_utc_time) - score: Optional[float] = None - scorecard_version: Optional[str] = None + score: float | None = None + scorecard_version: str | None = None checks: list[dict[str, Any]] = Field(default_factory=list) diff --git a/otterdog/webapp/policies/__init__.py b/otterdog/webapp/policies/__init__.py index 057ac256..e00dabcd 100644 --- a/otterdog/webapp/policies/__init__.py +++ b/otterdog/webapp/policies/__init__.py @@ -9,7 +9,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from enum import Enum +from enum import StrEnum from typing import TYPE_CHECKING, Any, Self from pydantic import BaseModel @@ -21,7 +21,7 @@ POLICY_PATH = "otterdog/policies" -class PolicyType(str, Enum): +class PolicyType(StrEnum): DEPENDENCY_TRACK_UPLOAD 
= "dependency_track_upload" MACOS_LARGE_RUNNERS_USAGE = "macos_large_runners" diff --git a/otterdog/webapp/tasks/__init__.py b/otterdog/webapp/tasks/__init__.py index 635faa66..7a210955 100644 --- a/otterdog/webapp/tasks/__init__.py +++ b/otterdog/webapp/tasks/__init__.py @@ -163,8 +163,7 @@ def _update_task_model(self, task: TaskModel) -> None: cache_stats = "rest: no requests" else: cache_stats = ( - f"rest: {self.rest_statistics.cached_responses}/{self.rest_statistics.total_requests} " - f"request(s) cached" + f"rest: {self.rest_statistics.cached_responses}/{self.rest_statistics.total_requests} request(s) cached" ) if self.rest_statistics.remaining_rate_limit != -1: diff --git a/otterdog/webapp/tasks/blueprints/__init__.py b/otterdog/webapp/tasks/blueprints/__init__.py index d3386398..188c18b8 100644 --- a/otterdog/webapp/tasks/blueprints/__init__.py +++ b/otterdog/webapp/tasks/blueprints/__init__.py @@ -55,7 +55,7 @@ async def _pre_execute(self) -> bool: match blueprint_status.status: case BlueprintStatus.DISMISSED: self.logger.debug( - f"Blueprint '{self.blueprint.id}' dismissed for " f"repo '{self.org_id}/{self.repo_name}', skipping" + f"Blueprint '{self.blueprint.id}' dismissed for repo '{self.org_id}/{self.repo_name}', skipping" ) return False @@ -141,7 +141,7 @@ async def _create_pull_request( team_reviewers: list[str] | None = None, ) -> int: self.logger.debug( - f"creating pull request for blueprint '{self.blueprint.id}' " f"in repo '{self.org_id}/{self.repo_name}'" + f"creating pull request for blueprint '{self.blueprint.id}' in repo '{self.org_id}/{self.repo_name}'" ) if self.blueprint.description is not None: diff --git a/otterdog/webapp/tasks/blueprints/pin_workflow.py b/otterdog/webapp/tasks/blueprints/pin_workflow.py index 0fa65638..8f1c0116 100644 --- a/otterdog/webapp/tasks/blueprints/pin_workflow.py +++ b/otterdog/webapp/tasks/blueprints/pin_workflow.py @@ -67,7 +67,7 @@ async def _execute(self) -> CheckResult: async def _process_workflows( 
self, - pinned_workflows: dict[str, tuple[dict, list[str]]], + pinned_workflows: dict[str, tuple[WorkflowFile, list[str]]], result: CheckResult, ) -> None: result.remediation_needed = True diff --git a/otterdog/webapp/tasks/validate_pull_request.py b/otterdog/webapp/tasks/validate_pull_request.py index 0255f36f..8874fe8e 100644 --- a/otterdog/webapp/tasks/validate_pull_request.py +++ b/otterdog/webapp/tasks/validate_pull_request.py @@ -277,8 +277,7 @@ async def _get_pull_request_files(self, rest_api: RestApi) -> list[str]: def __repr__(self) -> str: return ( - f"ValidatePullRequestTask(repo='{self.org_id}/{self.repo_name}', " - f"pull_request=#{self.pull_request_number})" + f"ValidatePullRequestTask(repo='{self.org_id}/{self.repo_name}', pull_request=#{self.pull_request_number})" ) diff --git a/otterdog/webapp/utils.py b/otterdog/webapp/utils.py index f2c9a4f2..60a49899 100644 --- a/otterdog/webapp/utils.py +++ b/otterdog/webapp/utils.py @@ -214,7 +214,7 @@ async def _load_otterdog_config(ref: str | None = None) -> OtterdogConfig: import aiofiles async with aiofiles.tempfile.NamedTemporaryFile("wt") as file: - name = cast(str, file.name) + name = cast("str", file.name) await file.write(content) await file.flush() return OtterdogConfig.from_file(name, False, app_root) diff --git a/otterdog/webapp/webhook/github_models.py b/otterdog/webapp/webhook/github_models.py index 78852451..57465bba 100644 --- a/otterdog/webapp/webhook/github_models.py +++ b/otterdog/webapp/webhook/github_models.py @@ -13,7 +13,7 @@ from abc import ABC from collections.abc import Callable from datetime import datetime -from enum import Enum +from enum import StrEnum from pydantic import BaseModel @@ -131,7 +131,7 @@ class AssociatedPullRequest(BaseModel): html_url: str -class AuthorAssociation(str, Enum): +class AuthorAssociation(StrEnum): COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIME" diff --git a/poetry.lock b/poetry.lock index bc57f700..9fb72eac 100644 
--- a/poetry.lock +++ b/poetry.lock @@ -1570,6 +1570,93 @@ cryptography = ">=3.1,<3.4.0 || >3.4.0" dev = ["black", "isort", "mypy", "types-freezegun"] test = ["freezegun", "pytest (>=6.0,<7.0)", "pytest-cov"] +[[package]] +name = "librt" +version = "0.7.8" +description = "Mypyc runtime library" +optional = false +python-versions = ">=3.9" +groups = ["typing"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "librt-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b45306a1fc5f53c9330fbee134d8b3227fe5da2ab09813b892790400aa49352d"}, + {file = "librt-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:864c4b7083eeee250ed55135d2127b260d7eb4b5e953a9e5df09c852e327961b"}, + {file = "librt-0.7.8-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6938cc2de153bc927ed8d71c7d2f2ae01b4e96359126c602721340eb7ce1a92d"}, + {file = "librt-0.7.8-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66daa6ac5de4288a5bbfbe55b4caa7bf0cd26b3269c7a476ffe8ce45f837f87d"}, + {file = "librt-0.7.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4864045f49dc9c974dadb942ac56a74cd0479a2aafa51ce272c490a82322ea3c"}, + {file = "librt-0.7.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a36515b1328dc5b3ffce79fe204985ca8572525452eacabee2166f44bb387b2c"}, + {file = "librt-0.7.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b7e7f140c5169798f90b80d6e607ed2ba5059784968a004107c88ad61fb3641d"}, + {file = "librt-0.7.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff71447cb778a4f772ddc4ce360e6ba9c95527ed84a52096bd1bbf9fee2ec7c0"}, + {file = "librt-0.7.8-cp310-cp310-win32.whl", hash = "sha256:047164e5f68b7a8ebdf9fae91a3c2161d3192418aadd61ddd3a86a56cbe3dc85"}, + {file = "librt-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:d6f254d096d84156a46a84861183c183d30734e52383602443292644d895047c"}, + {file = 
"librt-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ff3e9c11aa260c31493d4b3197d1e28dd07768594a4f92bec4506849d736248f"}, + {file = "librt-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb52499d0b3ed4aa88746aaf6f36a08314677d5c346234c3987ddc506404eac"}, + {file = "librt-0.7.8-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e9c0afebbe6ce177ae8edba0c7c4d626f2a0fc12c33bb993d163817c41a7a05c"}, + {file = "librt-0.7.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:631599598e2c76ded400c0a8722dec09217c89ff64dc54b060f598ed68e7d2a8"}, + {file = "librt-0.7.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c1ba843ae20db09b9d5c80475376168feb2640ce91cd9906414f23cc267a1ff"}, + {file = "librt-0.7.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b5b007bb22ea4b255d3ee39dfd06d12534de2fcc3438567d9f48cdaf67ae1ae3"}, + {file = "librt-0.7.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dbd79caaf77a3f590cbe32dc2447f718772d6eea59656a7dcb9311161b10fa75"}, + {file = "librt-0.7.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:87808a8d1e0bd62a01cafc41f0fd6818b5a5d0ca0d8a55326a81643cdda8f873"}, + {file = "librt-0.7.8-cp311-cp311-win32.whl", hash = "sha256:31724b93baa91512bd0a376e7cf0b59d8b631ee17923b1218a65456fa9bda2e7"}, + {file = "librt-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:978e8b5f13e52cf23a9e80f3286d7546baa70bc4ef35b51d97a709d0b28e537c"}, + {file = "librt-0.7.8-cp311-cp311-win_arm64.whl", hash = "sha256:20e3946863d872f7cabf7f77c6c9d370b8b3d74333d3a32471c50d3a86c0a232"}, + {file = "librt-0.7.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9b6943885b2d49c48d0cff23b16be830ba46b0152d98f62de49e735c6e655a63"}, + {file = "librt-0.7.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46ef1f4b9b6cc364b11eea0ecc0897314447a66029ee1e55859acb3dd8757c93"}, + {file = 
"librt-0.7.8-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:907ad09cfab21e3c86e8f1f87858f7049d1097f77196959c033612f532b4e592"}, + {file = "librt-0.7.8-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2991b6c3775383752b3ca0204842743256f3ad3deeb1d0adc227d56b78a9a850"}, + {file = "librt-0.7.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03679b9856932b8c8f674e87aa3c55ea11c9274301f76ae8dc4d281bda55cf62"}, + {file = "librt-0.7.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3968762fec1b2ad34ce57458b6de25dbb4142713e9ca6279a0d352fa4e9f452b"}, + {file = "librt-0.7.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bb7a7807523a31f03061288cc4ffc065d684c39db7644c676b47d89553c0d714"}, + {file = "librt-0.7.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad64a14b1e56e702e19b24aae108f18ad1bf7777f3af5fcd39f87d0c5a814449"}, + {file = "librt-0.7.8-cp312-cp312-win32.whl", hash = "sha256:0241a6ed65e6666236ea78203a73d800dbed896cf12ae25d026d75dc1fcd1dac"}, + {file = "librt-0.7.8-cp312-cp312-win_amd64.whl", hash = "sha256:6db5faf064b5bab9675c32a873436b31e01d66ca6984c6f7f92621656033a708"}, + {file = "librt-0.7.8-cp312-cp312-win_arm64.whl", hash = "sha256:57175aa93f804d2c08d2edb7213e09276bd49097611aefc37e3fa38d1fb99ad0"}, + {file = "librt-0.7.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4c3995abbbb60b3c129490fa985dfe6cac11d88fc3c36eeb4fb1449efbbb04fc"}, + {file = "librt-0.7.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:44e0c2cbc9bebd074cf2cdbe472ca185e824be4e74b1c63a8e934cea674bebf2"}, + {file = "librt-0.7.8-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d2f1e492cae964b3463a03dc77a7fe8742f7855d7258c7643f0ee32b6651dd3"}, + {file = "librt-0.7.8-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:451e7ffcef8f785831fdb791bd69211f47e95dc4c6ddff68e589058806f044c6"}, + {file = "librt-0.7.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3469e1af9f1380e093ae06bedcbdd11e407ac0b303a56bbe9afb1d6824d4982d"}, + {file = "librt-0.7.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f11b300027ce19a34f6d24ebb0a25fd0e24a9d53353225a5c1e6cadbf2916b2e"}, + {file = "librt-0.7.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4adc73614f0d3c97874f02f2c7fd2a27854e7e24ad532ea6b965459c5b757eca"}, + {file = "librt-0.7.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60c299e555f87e4c01b2eca085dfccda1dde87f5a604bb45c2906b8305819a93"}, + {file = "librt-0.7.8-cp313-cp313-win32.whl", hash = "sha256:b09c52ed43a461994716082ee7d87618096851319bf695d57ec123f2ab708951"}, + {file = "librt-0.7.8-cp313-cp313-win_amd64.whl", hash = "sha256:f8f4a901a3fa28969d6e4519deceab56c55a09d691ea7b12ca830e2fa3461e34"}, + {file = "librt-0.7.8-cp313-cp313-win_arm64.whl", hash = "sha256:43d4e71b50763fcdcf64725ac680d8cfa1706c928b844794a7aa0fa9ac8e5f09"}, + {file = "librt-0.7.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:be927c3c94c74b05128089a955fba86501c3b544d1d300282cc1b4bd370cb418"}, + {file = "librt-0.7.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7b0803e9008c62a7ef79058233db7ff6f37a9933b8f2573c05b07ddafa226611"}, + {file = "librt-0.7.8-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:79feb4d00b2a4e0e05c9c56df707934f41fcb5fe53fd9efb7549068d0495b758"}, + {file = "librt-0.7.8-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9122094e3f24aa759c38f46bd8863433820654927370250f460ae75488b66ea"}, + {file = "librt-0.7.8-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e03bea66af33c95ce3addf87a9bf1fcad8d33e757bc479957ddbc0e4f7207ac"}, + {file = 
"librt-0.7.8-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f1ade7f31675db00b514b98f9ab9a7698c7282dad4be7492589109471852d398"}, + {file = "librt-0.7.8-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a14229ac62adcf1b90a15992f1ab9c69ae8b99ffb23cb64a90878a6e8a2f5b81"}, + {file = "librt-0.7.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5bcaaf624fd24e6a0cb14beac37677f90793a96864c67c064a91458611446e83"}, + {file = "librt-0.7.8-cp314-cp314-win32.whl", hash = "sha256:7aa7d5457b6c542ecaed79cec4ad98534373c9757383973e638ccced0f11f46d"}, + {file = "librt-0.7.8-cp314-cp314-win_amd64.whl", hash = "sha256:3d1322800771bee4a91f3b4bd4e49abc7d35e65166821086e5afd1e6c0d9be44"}, + {file = "librt-0.7.8-cp314-cp314-win_arm64.whl", hash = "sha256:5363427bc6a8c3b1719f8f3845ea53553d301382928a86e8fab7984426949bce"}, + {file = "librt-0.7.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ca916919793a77e4a98d4a1701e345d337ce53be4a16620f063191f7322ac80f"}, + {file = "librt-0.7.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:54feb7b4f2f6706bb82325e836a01be805770443e2400f706e824e91f6441dde"}, + {file = "librt-0.7.8-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:39a4c76fee41007070f872b648cc2f711f9abf9a13d0c7162478043377b52c8e"}, + {file = "librt-0.7.8-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac9c8a458245c7de80bc1b9765b177055efff5803f08e548dd4bb9ab9a8d789b"}, + {file = "librt-0.7.8-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b67aa7eff150f075fda09d11f6bfb26edffd300f6ab1666759547581e8f666"}, + {file = "librt-0.7.8-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:535929b6eff670c593c34ff435d5440c3096f20fa72d63444608a5aef64dd581"}, + {file = "librt-0.7.8-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:63937bd0f4d1cb56653dc7ae900d6c52c41f0015e25aaf9902481ee79943b33a"}, + {file = 
"librt-0.7.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf243da9e42d914036fd362ac3fa77d80a41cadcd11ad789b1b5eec4daaf67ca"}, + {file = "librt-0.7.8-cp314-cp314t-win32.whl", hash = "sha256:171ca3a0a06c643bd0a2f62a8944e1902c94aa8e5da4db1ea9a8daf872685365"}, + {file = "librt-0.7.8-cp314-cp314t-win_amd64.whl", hash = "sha256:445b7304145e24c60288a2f172b5ce2ca35c0f81605f5299f3fa567e189d2e32"}, + {file = "librt-0.7.8-cp314-cp314t-win_arm64.whl", hash = "sha256:8766ece9de08527deabcd7cb1b4f1a967a385d26e33e536d6d8913db6ef74f06"}, + {file = "librt-0.7.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c7e8f88f79308d86d8f39c491773cbb533d6cb7fa6476f35d711076ee04fceb6"}, + {file = "librt-0.7.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:389bd25a0db916e1d6bcb014f11aa9676cedaa485e9ec3752dfe19f196fd377b"}, + {file = "librt-0.7.8-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73fd300f501a052f2ba52ede721232212f3b06503fa12665408ecfc9d8fd149c"}, + {file = "librt-0.7.8-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d772edc6a5f7835635c7562f6688e031f0b97e31d538412a852c49c9a6c92d5"}, + {file = "librt-0.7.8-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde8a130bd0f239e45503ab39fab239ace094d63ee1d6b67c25a63d741c0f71"}, + {file = "librt-0.7.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fdec6e2368ae4f796fc72fad7fd4bd1753715187e6d870932b0904609e7c878e"}, + {file = "librt-0.7.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:00105e7d541a8f2ee5be52caacea98a005e0478cfe78c8080fbb7b5d2b340c63"}, + {file = "librt-0.7.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c6f8947d3dfd7f91066c5b4385812c18be26c9d5a99ca56667547f2c39149d94"}, + {file = "librt-0.7.8-cp39-cp39-win32.whl", hash = "sha256:41d7bb1e07916aeb12ae4a44e3025db3691c4149ab788d0315781b4d29b86afb"}, + {file = "librt-0.7.8-cp39-cp39-win_amd64.whl", hash = 
"sha256:e90a8e237753c83b8e484d478d9a996dc5e39fd5bd4c6ce32563bc8123f132be"}, + {file = "librt-0.7.8.tar.gz", hash = "sha256:1a4ede613941d9c3470b0368be851df6bb78ab218635512d0370b27a277a0862"}, +] + [[package]] name = "markdown" version = "3.10" @@ -2093,53 +2180,54 @@ files = [ [[package]] name = "mypy" -version = "1.18.2" +version = "1.19.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" groups = ["typing"] files = [ - {file = "mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c"}, - {file = "mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e"}, - {file = "mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b"}, - {file = "mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66"}, - {file = "mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428"}, - {file = "mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed"}, - {file = "mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f"}, - {file = "mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341"}, - {file = "mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d"}, - {file = "mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash 
= "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86"}, - {file = "mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37"}, - {file = "mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8"}, - {file = "mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34"}, - {file = "mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764"}, - {file = "mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893"}, - {file = "mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914"}, - {file = "mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8"}, - {file = "mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074"}, - {file = "mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc"}, - {file = "mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e"}, - {file = "mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986"}, - {file = "mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d"}, - {file = 
"mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba"}, - {file = "mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544"}, - {file = "mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce"}, - {file = "mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d"}, - {file = "mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c"}, - {file = "mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb"}, - {file = "mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075"}, - {file = "mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf"}, - {file = "mypy-1.18.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25a9c8fb67b00599f839cf472713f54249a62efd53a54b565eb61956a7e3296b"}, - {file = "mypy-1.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2b9c7e284ee20e7598d6f42e13ca40b4928e6957ed6813d1ab6348aa3f47133"}, - {file = "mypy-1.18.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6985ed057513e344e43a26cc1cd815c7a94602fb6a3130a34798625bc2f07b6"}, - {file = "mypy-1.18.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22f27105f1525ec024b5c630c0b9f36d5c1cc4d447d61fe51ff4bd60633f47ac"}, - {file = "mypy-1.18.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:030c52d0ea8144e721e49b1f68391e39553d7451f0c3f8a7565b59e19fcb608b"}, - {file = "mypy-1.18.2-cp39-cp39-win_amd64.whl", hash = "sha256:aa5e07ac1a60a253445797e42b8b2963c9675563a94f11291ab40718b016a7a0"}, - {file = "mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e"}, - {file = "mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74"}, + {file = "mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1"}, + {file = "mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331"}, + {file = "mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925"}, + {file = "mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8"}, + {file = "mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a"}, + {file = "mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef"}, + {file = 
"mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75"}, + {file = "mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045"}, + {file = "mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957"}, + {file = "mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bcfc336a03a1aaa26dfce9fff3e287a3ba99872a157561cbfcebe67c13308e3"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b7951a701c07ea584c4fe327834b92a30825514c868b1f69c30445093fdd9d5a"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b13cfdd6c87fc3efb69ea4ec18ef79c74c3f98b4e5498ca9b85ab3b2c2329a67"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f28f99c824ecebcdaa2e55d82953e38ff60ee5ec938476796636b86afa3956e"}, + {file = "mypy-1.19.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:c608937067d2fc5a4dd1a5ce92fd9e1398691b8c5d012d66e1ddd430e9244376"}, + {file = "mypy-1.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:409088884802d511ee52ca067707b90c883426bd95514e8cfda8281dc2effe24"}, + {file = "mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247"}, + {file = "mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba"}, ] [package.dependencies] +librt = {version = ">=0.6.2", markers = "platform_python_implementation != \"PyPy\""} mypy_extensions = ">=1.0.0" pathspec = ">=0.9.0" typing_extensions = ">=4.6.0" @@ -2177,14 +2265,14 @@ files = [ [[package]] name = "odmantic" -version = "1.0.2" -description = "ODMantic, an AsyncIO MongoDB Object Document Mapper for Python using type hints" +version = "1.1.0" +description = "ODMantic, an AsyncIO MongoDB Object Document Mapper for Python using type hints " optional = false python-versions = ">=3.8" groups = ["app"] files = [ - {file = "odmantic-1.0.2-py3-none-any.whl", hash = "sha256:0475c763687624b70ba4d52266a7d0bdfb74dda7ec40162669334e9d41c06049"}, - {file = "odmantic-1.0.2.tar.gz", hash = "sha256:bbd0d46bc920e0e6d966d9f3add7396bc92377d2c73860d071d5ca8102f57235"}, + {file = "odmantic-1.1.0-py3-none-any.whl", hash = "sha256:01a851c2558e7cde69e292a54fd7b2d5cbc03b7af88b4f3a841d51c79274d159"}, + {file = "odmantic-1.1.0.tar.gz", hash = "sha256:70a1cfa7211870cd3d7eacf5a3fd99db29d6b333a33c77dcaa13330d003e2982"}, ] [package.dependencies] @@ -2193,11 +2281,11 @@ pydantic = ">=2.5.2" pymongo = ">=4.1.0" [package.extras] -dev = ["ipython (>=7.16.1,<7.17.0)", "semver (>=2.13.0,<2.14.0)", "typer (>=0.4.1,<0.5.0)"] -doc = ["mkdocs-macros-plugin (>=1.0.4,<1.1.0)", "mkdocs-material (>=9.5.2,<9.6.0)", "mkdocstrings[python] (>=0.24.0,<0.25.0)", "pydocstyle[toml] (>=6.3.0,<6.4.0)"] +dev = ["ipython (>=7.16.1)"] +doc = ["griffe (<1.0)", "mkdocs-macros-plugin (>=1.0.4)", "mkdocs-material (>=9.5.2)", 
"mkdocstrings[python] (>=0.18)", "pydocstyle[toml] (>=6.3.0)"] fastapi = ["fastapi (>=0.100.0)"] -lint = ["mypy (>=1.4.1,<1.5.0)", "ruff (>=0.3.3,<0.4.0)"] -test = ["async-asgi-testclient (>=1.4.11,<1.5.0)", "asyncmock (>=0.4.2,<0.5.0)", "coverage[toml] (>=6.2,<7.0)", "darglint (>=1.8.1,<1.9.0)", "fastapi (>=0.104.0)", "httpx (>=0.24.1,<0.25.0)", "inline-snapshot (>=0.6.0,<0.7.0)", "pytest (>=7.0,<8.0)", "pytest-asyncio (>=0.16.0,<0.17.0)", "pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-codspeed (>=2.1.0,<2.2.0)", "pytest-sugar (>=0.9.5,<0.10.0)", "pytest-xdist (>=2.1.0,<2.2.0)", "pytz (>=2023.3,<2024.0)", "requests (>=2.24,<3.0)", "types-pytz (>=2023.3.0.0,<2023.4.0.0)", "uvicorn (>=0.17.0,<0.18.0)"] +lint = ["mypy (>=1.4.1)", "ruff (>=0.3.3)"] +test = ["async-asgi-testclient (>=1.4.11)", "asyncmock (>=0.4.2)", "coverage[toml] (>=6.2)", "darglint (>=1.8.1)", "fastapi (>=0.104.0,<0.109)", "httpx (>=0.24.1,<0.28)", "inline-snapshot (>=0.6.0)", "pytest (>=7.0)", "pytest-asyncio (>=0.16.0)", "pytest-benchmark (>=4.0.0)", "pytest-codspeed (>=2.1.0)", "pytest-sugar (>=0.9.5)", "pytest-xdist (>=2.1.0)", "pytz (>=2023.3)", "requests (>=2.24)", "types-pytz (>=2023.3.0.0)", "uvicorn (>=0.17.0)"] [[package]] name = "packaging" @@ -3372,31 +3460,30 @@ files = [ [[package]] name = "ruff" -version = "0.14.5" +version = "0.15.0" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.14.5-py3-none-linux_armv6l.whl", hash = "sha256:f3b8248123b586de44a8018bcc9fefe31d23dda57a34e6f0e1e53bd51fd63594"}, - {file = "ruff-0.14.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f7a75236570318c7a30edd7f5491945f0169de738d945ca8784500b517163a72"}, - {file = "ruff-0.14.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6d146132d1ee115f8802356a2dc9a634dbf58184c51bff21f313e8cd1c74899a"}, - {file = "ruff-0.14.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2380596653dcd20b057794d55681571a257a42327da8894b93bbd6111aa801f"}, - {file = "ruff-0.14.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d1fa985a42b1f075a098fa1ab9d472b712bdb17ad87a8ec86e45e7fa6273e68"}, - {file = "ruff-0.14.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88f0770d42b7fa02bbefddde15d235ca3aa24e2f0137388cc15b2dcbb1f7c7a7"}, - {file = "ruff-0.14.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3676cb02b9061fee7294661071c4709fa21419ea9176087cb77e64410926eb78"}, - {file = "ruff-0.14.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b595bedf6bc9cab647c4a173a61acf4f1ac5f2b545203ba82f30fcb10b0318fb"}, - {file = "ruff-0.14.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f55382725ad0bdb2e8ee2babcbbfb16f124f5a59496a2f6a46f1d9d99d93e6e2"}, - {file = "ruff-0.14.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7497d19dce23976bdaca24345ae131a1d38dcfe1b0850ad8e9e6e4fa321a6e19"}, - {file = "ruff-0.14.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:410e781f1122d6be4f446981dd479470af86537fb0b8857f27a6e872f65a38e4"}, - {file = "ruff-0.14.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c01be527ef4c91a6d55e53b337bfe2c0f82af024cc1a33c44792d6844e2331e1"}, - {file = "ruff-0.14.5-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:f66e9bb762e68d66e48550b59c74314168ebb46199886c5c5aa0b0fbcc81b151"}, - {file = "ruff-0.14.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d93be8f1fa01022337f1f8f3bcaa7ffee2d0b03f00922c45c2207954f351f465"}, - {file = "ruff-0.14.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c135d4b681f7401fe0e7312017e41aba9b3160861105726b76cfa14bc25aa367"}, - {file = "ruff-0.14.5-py3-none-win32.whl", hash = "sha256:c83642e6fccfb6dea8b785eb9f456800dcd6a63f362238af5fc0c83d027dd08b"}, - {file = "ruff-0.14.5-py3-none-win_amd64.whl", hash = "sha256:9d55d7af7166f143c94eae1db3312f9ea8f95a4defef1979ed516dbb38c27621"}, - {file = "ruff-0.14.5-py3-none-win_arm64.whl", hash = "sha256:4b700459d4649e2594b31f20a9de33bc7c19976d4746d8d0798ad959621d64a4"}, - {file = "ruff-0.14.5.tar.gz", hash = "sha256:8d3b48d7d8aad423d3137af7ab6c8b1e38e4de104800f0d596990f6ada1a9fc1"}, + {file = "ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455"}, + {file = "ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d"}, + {file = "ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce"}, + {file = "ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621"}, + {file = "ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9"}, + {file = "ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179"}, + {file = "ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d"}, + {file = 
"ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78"}, + {file = "ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4"}, + {file = "ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e"}, + {file = "ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662"}, + {file = "ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1"}, + {file = "ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16"}, + {file = "ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3"}, + {file = "ruff-0.15.0-py3-none-win32.whl", hash = "sha256:238a717ef803e501b6d51e0bdd0d2c6e8513fe9eec14002445134d3907cd46c3"}, + {file = "ruff-0.15.0-py3-none-win_amd64.whl", hash = "sha256:dd5e4d3301dc01de614da3cdffc33d4b1b96fb89e45721f1598e5532ccf78b18"}, + {file = "ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a"}, + {file = "ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a"}, ] [[package]] @@ -4007,4 +4094,4 @@ propcache = ">=0.2.1" [metadata] lock-version = "2.1" python-versions = ">=3.11,<4.0" -content-hash = "bb71a1896310dda759a72314b45eb236bb7d64766b88e5e8886c46fde5b1cb3d" +content-hash = "51048d0a99b363f8e9ce115d8a09e6b3fd350252f1ac796ad5106609e72215f0" diff --git a/pyproject.toml b/pyproject.toml index b922580c..7a8b7d08 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ python-decouple = "^3.8" python-dotenv = 
"^1.0" pymongo = "==4.9.1" pydantic = "^2.7" -odmantic = "^1.0" +odmantic = "^1.1.0" quart-auth = "^0.10" quart-redis = "^2.0" github-flask = "^3.2" @@ -103,7 +103,7 @@ semver = "^3.0" python-slugify = "^8.0" [tool.poetry.group.dev.dependencies] -ruff = ">=0.8" +ruff = ">=0.15" pre-commit = ">=4.0" [tool.poetry.group.test.dependencies] @@ -115,7 +115,7 @@ pytest-cov = ">=6.1.1" pretend = "^1.0.9" [tool.poetry.group.typing.dependencies] -mypy = ">=1.8.0" +mypy = ">=1.19.1" types-colorama = ">=0.4" types-requests = ">=2.31" types-jsonschema = ">=4.17" diff --git a/tests/models/resources/github-env-secret.json b/tests/models/resources/github-env-secret.json new file mode 100644 index 00000000..36339d61 --- /dev/null +++ b/tests/models/resources/github-env-secret.json @@ -0,0 +1,4 @@ +{ + "name": "TEST-SECRET", + "visibility": "selected" +} diff --git a/tests/models/resources/github-team-permission.json b/tests/models/resources/github-team-permission.json new file mode 100644 index 00000000..25e2b47b --- /dev/null +++ b/tests/models/resources/github-team-permission.json @@ -0,0 +1,4 @@ +{ + "name": "TEAM", + "permission": "READ" +} diff --git a/tests/models/resources/otterdog-env-secret.json b/tests/models/resources/otterdog-env-secret.json new file mode 100644 index 00000000..59da4df1 --- /dev/null +++ b/tests/models/resources/otterdog-env-secret.json @@ -0,0 +1,5 @@ +{ + "name": "TEST-SECRET", + "visibility": "selected", + "value": "5678" +} diff --git a/tests/models/resources/otterdog-team-permission.json b/tests/models/resources/otterdog-team-permission.json new file mode 100644 index 00000000..545da5e2 --- /dev/null +++ b/tests/models/resources/otterdog-team-permission.json @@ -0,0 +1,4 @@ +{ + "name": "TEAM", + "permission": "pull" +} diff --git a/tests/models/resources/test-org/vendor/github-env-secret.json b/tests/models/resources/test-org/vendor/github-env-secret.json new file mode 100644 index 00000000..b9244285 --- /dev/null +++ 
b/tests/models/resources/test-org/vendor/github-env-secret.json @@ -0,0 +1,4 @@ +{ + "name": "TEST-SECRET", + "visibility": "selected" + } diff --git a/tests/models/test_env_secret.py b/tests/models/test_env_secret.py new file mode 100644 index 00000000..22119eb4 --- /dev/null +++ b/tests/models/test_env_secret.py @@ -0,0 +1,67 @@ +# ******************************************************************************* +# Copyright (c) 2023-2025 Eclipse Foundation and others. +# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from collections.abc import Mapping +from typing import Any + +from otterdog.jsonnet import JsonnetConfig +from otterdog.models import ModelObject +from otterdog.models.env_secret import EnvironmentSecret +from otterdog.utils import Change + +from . 
import ModelTest + + +class EnvironmentSecretTest(ModelTest): + def create_model(self, data: Mapping[str, Any]) -> ModelObject: + return EnvironmentSecret.from_model_data(data) + + @property + def template_function(self) -> str: + return JsonnetConfig.create_org_secret + + @property + def model_data(self): + return self.load_json_resource("otterdog-env-secret.json") + + @property + def provider_data(self): + return self.load_json_resource("github-env-secret.json") + + def test_load_from_model(self): + secret = EnvironmentSecret.from_model_data(self.model_data) + + assert secret.name == "TEST-SECRET" + assert secret.value == "5678" + + def test_load_from_provider(self): + secret = EnvironmentSecret.from_provider_data(self.org_id, self.provider_data) + + assert secret.name == "TEST-SECRET" + assert secret.value == "********" + + def test_patch(self): + current = EnvironmentSecret.from_model_data(self.model_data) + default = EnvironmentSecret.from_model_data(self.model_data) + + default.value = "8765" + patch = current.get_patch_to(default) + + assert len(patch) == 1 + assert patch["value"] == current.value + + def test_difference(self): + current = EnvironmentSecret.from_model_data(self.model_data) + other = EnvironmentSecret.from_model_data(self.model_data) + + other.value = "8765" + + diff = current.get_difference_from(other) + + assert len(diff) == 1 + assert diff["value"] == Change(other.value, current.value) diff --git a/tests/models/test_env_variable.py b/tests/models/test_env_variable.py new file mode 100644 index 00000000..50190945 --- /dev/null +++ b/tests/models/test_env_variable.py @@ -0,0 +1,33 @@ +# ******************************************************************************* +# Copyright (c) 2023-2025 Eclipse Foundation and others. 
+# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from collections.abc import Mapping +from typing import Any + +from otterdog.jsonnet import JsonnetConfig +from otterdog.models import ModelObject +from otterdog.models.env_variable import EnvironmentVariable + +from . import ModelTest + + +class EnvironmentVariableTest(ModelTest): + def create_model(self, data: Mapping[str, Any]) -> ModelObject: + return EnvironmentVariable.from_model_data(data) + + @property + def template_function(self) -> str: + return JsonnetConfig.create_env_variable + + @property + def model_data(self): + raise NotImplementedError + + @property + def provider_data(self): + raise NotImplementedError diff --git a/tests/models/test_repository.py b/tests/models/test_repository.py index 08f98a1c..ce58e930 100644 --- a/tests/models/test_repository.py +++ b/tests/models/test_repository.py @@ -187,6 +187,7 @@ def test__include_gh_pages_patch_required_properties( current_object=default, changes=changes, parent_object=None, + grandparent_object=None, forced_update=False, fn=pretend.stub(), changes_object_to_readonly=False, @@ -257,6 +258,7 @@ def test__include_squash_merge_patch_required_properties( current_object=default, changes=changes, parent_object=None, + grandparent_object=None, forced_update=False, fn=pretend.stub(), changes_object_to_readonly=False, @@ -314,14 +316,14 @@ def test_gh_pages_visibility_validation(self, repository_test): repo.private = True repo.gh_pages_visibility = "public" - repo.validate(context, mock_org_enterprise) + repo.validate(context, mock_org_enterprise, None) failures = [ f for f in context.validation_failures if f[0] == FailureType.ERROR and "gh_pages_visibility" in f[1] ] assert len(failures) == initial_failures 
repo.gh_pages_visibility = "private" - repo.validate(context, mock_org_enterprise) + repo.validate(context, mock_org_enterprise, None) failures = [ f for f in context.validation_failures if f[0] == FailureType.ERROR and "gh_pages_visibility" in f[1] ] @@ -329,7 +331,7 @@ def test_gh_pages_visibility_validation(self, repository_test): # Test invalid value - should add a failure repo.gh_pages_visibility = "invalid" - repo.validate(context, mock_org_enterprise) + repo.validate(context, mock_org_enterprise, None) # Check that validation failed for invalid value failures = [ @@ -344,7 +346,7 @@ def test_gh_pages_visibility_validation(self, repository_test): # Test with public repository (should fail even with enterprise plan) repo.private = False - repo.validate(context, mock_org_enterprise) + repo.validate(context, mock_org_enterprise, None) failures = [ f for f in context.validation_failures if f[0] == FailureType.ERROR and "gh_pages_visibility" in f[1] @@ -358,7 +360,7 @@ def test_gh_pages_visibility_validation(self, repository_test): repo.gh_pages_visibility = "private" # Test with enterprise plan but members_can_create_private_pages disabled - should fail - repo.validate(context, mock_org_enterprise_no_private_pages) + repo.validate(context, mock_org_enterprise_no_private_pages, None) failures = [ f for f in context.validation_failures if f[0] == FailureType.ERROR and "gh_pages_visibility" in f[1] diff --git a/tests/models/test_ruleset.py b/tests/models/test_ruleset.py index 2c1e9fa8..e6a91c4f 100644 --- a/tests/models/test_ruleset.py +++ b/tests/models/test_ruleset.py @@ -162,28 +162,28 @@ def test_get_mapping_from_provider(self, caplog, test_case, bypass_actors, expec mapping = Ruleset.get_mapping_from_provider(self.org_id, data) warning_records = [r for r in caplog.records if r.levelname == "WARNING"] - assert len(warning_records) == len( - expected_warnings - ), f"Test case '{test_case}': Expected {len(expected_warnings)} warnings, got {len(warning_records)}" + 
assert len(warning_records) == len(expected_warnings), ( + f"Test case '{test_case}': Expected {len(expected_warnings)} warnings, got {len(warning_records)}" + ) for i, (expected_msg, expected_arg) in enumerate(expected_warnings): if i < len(warning_records): record = warning_records[i] - assert ( - expected_msg % expected_arg in record.message - ), f"Test case '{test_case}': Warning message mismatch. Expected '{expected_msg % expected_arg}', got '{record.message}'" + assert expected_msg % expected_arg in record.message, ( + f"Test case '{test_case}': Warning message mismatch. Expected '{expected_msg % expected_arg}', got '{record.message}'" + ) result = bend(mapping, data) bypass_actors_result = result["bypass_actors"] - assert len(bypass_actors_result) == len( - expected_actors - ), f"Test case '{test_case}': Expected {len(expected_actors)} actors, got {len(bypass_actors_result)}" + assert len(bypass_actors_result) == len(expected_actors), ( + f"Test case '{test_case}': Expected {len(expected_actors)} actors, got {len(bypass_actors_result)}" + ) for expected_actor in expected_actors: - assert ( - expected_actor in bypass_actors_result - ), f"Test case '{test_case}': Expected actor '{expected_actor}' not found in result" + assert expected_actor in bypass_actors_result, ( + f"Test case '{test_case}': Expected actor '{expected_actor}' not found in result" + ) def test_get_mapping_from_provider_missing_bypass_actors_key(self): data = { diff --git a/tests/models/test_team_permission.py b/tests/models/test_team_permission.py new file mode 100644 index 00000000..6a097d27 --- /dev/null +++ b/tests/models/test_team_permission.py @@ -0,0 +1,68 @@ +# ******************************************************************************* +# Copyright (c) 2023-2025 Eclipse Foundation and others. 
+# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from collections.abc import Mapping +from typing import Any + +from otterdog.jsonnet import JsonnetConfig +from otterdog.models import ModelObject +from otterdog.models.team_permission import TeamPermission +from otterdog.utils import Change + +from . import ModelTest + + +class TeamPermissionTest(ModelTest): + def create_model(self, data: Mapping[str, Any]) -> ModelObject: + return TeamPermission.from_model_data(data) + + @property + def template_function(self) -> str: + return JsonnetConfig.create_org_secret + + @property + def model_data(self): + return self.load_json_resource("otterdog-team-permission.json") + + @property + def provider_data(self): + return self.load_json_resource("github-team-permission.json") + + def test_load_from_model(self): + team_permission = TeamPermission.from_model_data(self.model_data) + + assert team_permission.name == "TEAM" + assert team_permission.permission == "pull" + + def test_load_from_provider(self): + team_permission = TeamPermission.from_provider_data(self.org_id, self.provider_data) + + assert team_permission.name == "TEAM" + assert team_permission.permission == "pull" + + def test_patch(self): + current = TeamPermission.from_model_data(self.model_data) + default = TeamPermission.from_model_data(self.model_data) + + default.permission = "admin" + + patch = current.get_patch_to(default) + + assert len(patch) == 1 + assert patch["permission"] == current.permission + + def test_difference(self): + current = TeamPermission.from_model_data(self.model_data) + other = TeamPermission.from_model_data(self.model_data) + + other.permission = "triage" + + diff = current.get_difference_from(other) + + assert len(diff) == 1 + assert 
diff["permission"] == Change(other.permission, current.permission) diff --git a/tests/operations/test_list_advisories.py b/tests/operations/test_list_advisories.py index 943b362e..d39fb930 100644 --- a/tests/operations/test_list_advisories.py +++ b/tests/operations/test_list_advisories.py @@ -118,7 +118,7 @@ async def test_execute_advisory_processing( expected_csv = ( f'"test-org","2024-01-01 00:00:00","366","2024-01-02 00:00:00","365","2024-01-03 00:00:00","","","published","high",' f'"GHSA-1234","{expected_values["cve"]}","https://github.com/advisories/GHSA-1234",' - f'{expected_values["summary_check"]}' + f"{expected_values['summary_check']}" ) assert csv_output == expected_csv diff --git a/tests/operations/test_operation.py b/tests/operations/test_operation.py index 159e4cd8..4e425a21 100644 --- a/tests/operations/test_operation.py +++ b/tests/operations/test_operation.py @@ -17,7 +17,6 @@ class OperationTest(unittest.TestCase): - # fmt: off @parameterized.expand( [ ( @@ -50,8 +49,7 @@ class OperationTest(unittest.TestCase): '~ }', ), ] - ) - # fmt: on + ) # fmt: skip @patch.multiple(Operation, __abstractmethods__=set()) def test_print_modified_dict(self, test_input, expected: str): operation = Operation() # type: ignore diff --git a/tests/providers/github/integration/helpers/http_mock.py b/tests/providers/github/integration/helpers/http_mock.py index 872e4774..866790e5 100644 --- a/tests/providers/github/integration/helpers/http_mock.py +++ b/tests/providers/github/integration/helpers/http_mock.py @@ -124,7 +124,7 @@ def _retrieve_matching_response(self, actual_request: HttpRequest, debug_info) - else: msg = self._format_unexpected_request_details(actual_request, debug_info) pytest.fail(msg, pytrace=False) - raise SystemExit(1) # explicit exit for old mypy versions, which do not understand pytest.fail() as exit + raise SystemExit(1) # explicit exit for mypy, which does not understand pytest.fail() is NoReturn def verify_all_called(self) -> None: if warnings := 
[f"Expected request not made: {exp.method} {exp.url}" for exp in self.expected]: diff --git a/tests/providers/github/integration/helpers/model.py b/tests/providers/github/integration/helpers/model.py index f8f8fb94..f3f66d62 100644 --- a/tests/providers/github/integration/helpers/model.py +++ b/tests/providers/github/integration/helpers/model.py @@ -7,6 +7,8 @@ # ******************************************************************************* from otterdog.models import LivePatch, LivePatchContext, ModelObject +from otterdog.models.env_secret import EnvironmentSecret +from otterdog.models.env_variable import EnvironmentVariable from otterdog.models.environment import Environment from otterdog.models.organization_secret import OrganizationSecret from otterdog.models.organization_settings import OrganizationSettings @@ -84,6 +86,7 @@ def generate_live_patch( expected_object=new, current_object=old, parent_object=self.get_parent_object(old, new), + grandparent_object=self.get_grandparent_object(old, new), context=self.live_patch_context, handler=lambda p: patches.append(p), # pyright: ignore[reportArgumentType] ) @@ -97,8 +100,19 @@ def get_parent_object(self, old: ModelObject | None, new: ModelObject | None) -> """ model_cls = determine_model_object(old, new) - if model_cls in {RepositorySecret, RepositoryVariable}: + if model_cls in {RepositorySecret, RepositoryVariable, Environment}: return self.repository if model_cls in {OrganizationSecret, OrganizationVariable}: return None # Organization-level, no parent object raise ValueError(f"Unknown model class for parent: {model_cls}") + + def get_grandparent_object(self, old: ModelObject | None, new: ModelObject | None) -> ModelObject | None: + """ + Based on provided old/new objects and test context, return the correct parent object. + Objects do not store their parents directly, so we need to reconstruct them here. 
+ """ + + model_cls = determine_model_object(old, new) + if model_cls in {EnvironmentVariable, EnvironmentSecret}: + return self.repository + return None From f6fdfc8042a0242d0026e9e4dad86f81347ace2f Mon Sep 17 00:00:00 2001 From: Wolfgang Fischer Date: Fri, 6 Feb 2026 10:52:08 +0100 Subject: [PATCH 2/4] chore: ruff finding --- otterdog/models/repository.py | 1 - 1 file changed, 1 deletion(-) diff --git a/otterdog/models/repository.py b/otterdog/models/repository.py index 61567f1b..f77f059c 100644 --- a/otterdog/models/repository.py +++ b/otterdog/models/repository.py @@ -295,7 +295,6 @@ def requires_language_validation(self) -> bool: ) async def validate_code_scanning_languages(self, context: ValidationContext, parent_object: Any) -> None: - from .github_organization import GitHubOrganization # Only validate if provider is available and validation is required if self.requires_language_validation() and context.provider is not None: From fcff059d2a082d6698382192909b082a84fe0fb9 Mon Sep 17 00:00:00 2001 From: Wolfgang Fischer Date: Wed, 11 Feb 2026 10:56:26 +0100 Subject: [PATCH 3/4] fix: removed double team definition --- examples/template/otterdog-defaults.libsonnet | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/template/otterdog-defaults.libsonnet b/examples/template/otterdog-defaults.libsonnet index b6ddd341..3ee83d9e 100644 --- a/examples/template/otterdog-defaults.libsonnet +++ b/examples/template/otterdog-defaults.libsonnet @@ -447,7 +447,6 @@ local newOrg(name, id=name) = { { newOrg:: newOrg, newOrgRole:: newOrgRole, - newTeam:: newTeam, newOrgWebhook:: newOrgWebhook, newOrgSecret:: newOrgSecret, newOrgVariable:: newOrgVariable, From 9404cc0281dc46d6717148242b8068a67e41f89e Mon Sep 17 00:00:00 2001 From: Wolfgang Fischer Date: Wed, 11 Feb 2026 14:54:33 +0100 Subject: [PATCH 4/4] fix: add # type: ignore to calm down mypy to ignore missing library stubs --- otterdog/jsonnet.py | 4 ++-- otterdog/models/github_organization.py | 2 +- 
otterdog/models/team_permission.py | 2 +- otterdog/providers/github/rest/repo_client.py | 4 ++-- otterdog/utils.py | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/otterdog/jsonnet.py b/otterdog/jsonnet.py index c118eea0..327755fa 100644 --- a/otterdog/jsonnet.py +++ b/otterdog/jsonnet.py @@ -12,8 +12,8 @@ from shutil import ignore_patterns from typing import Any -import aiofiles.os -import aiofiles.ospath +import aiofiles.os # type: ignore +import aiofiles.ospath # type: ignore from .logging import get_logger from .utils import jsonnet_evaluate_snippet, parse_github_url, parse_template_url diff --git a/otterdog/models/github_organization.py b/otterdog/models/github_organization.py index d71984ae..624e777d 100644 --- a/otterdog/models/github_organization.py +++ b/otterdog/models/github_organization.py @@ -228,7 +228,7 @@ async def validate( @staticmethod def _validate_org_config(data: dict[str, Any]) -> None: - from jsonschema import Draft202012Validator + from jsonschema import Draft202012Validator # type: ignore from referencing import Registry, Resource from referencing.exceptions import NoSuchResource diff --git a/otterdog/models/team_permission.py b/otterdog/models/team_permission.py index 0e46fdc4..4663d9de 100644 --- a/otterdog/models/team_permission.py +++ b/otterdog/models/team_permission.py @@ -11,7 +11,7 @@ import dataclasses from typing import TYPE_CHECKING, Any -from jsonbender import F, OptionalS +from jsonbender import F, OptionalS # type: ignore from otterdog.models import ( FailureType, diff --git a/otterdog/providers/github/rest/repo_client.py b/otterdog/providers/github/rest/repo_client.py index 7065223d..3b374d04 100644 --- a/otterdog/providers/github/rest/repo_client.py +++ b/otterdog/providers/github/rest/repo_client.py @@ -15,8 +15,8 @@ import zipfile from typing import Any -import aiofiles -import chevron +import aiofiles # type: ignore +import chevron # type: ignore from otterdog.logging import is_trace_enabled from 
otterdog.providers.github.exception import GitHubException diff --git a/otterdog/utils.py b/otterdog/utils.py index f4484850..9d3bde0d 100644 --- a/otterdog/utils.py +++ b/otterdog/utils.py @@ -639,7 +639,7 @@ def wrapper_timed(*args, **kwargs): def render_chevron(content: str, context: dict[str, Any]) -> str: - import chevron + import chevron # type: ignore # need to escape ${{ sequences as used at GitHub escaped_content = content.replace("${{", "$\\{\\{")