diff --git a/.env.example b/.env.example index febae2d29..78a8fbebb 100644 --- a/.env.example +++ b/.env.example @@ -1,38 +1,44 @@ -# ===== -# Dev Configuration -# The devShell reads this file to set defaults, so changing values here -# affects local development. -# ===== +# ----------------------------------------------------------------------------- +# Server Configuration (dev defaults, required in all environments) +# ----------------------------------------------------------------------------- -# Server port - used by both the server and E2E tests +# Port the registry server listens on +# - Dev/Test: 9000 (from this file) +# - Prod: Set in deployment config SERVER_PORT=9000 # SQLite database path (relative to working directory) +# - Dev: Uses local ./db directory +# - Test: Overridden to use temp state directory +# - Prod: Set to production database path DATABASE_URL="sqlite:db/registry.sqlite3" -# ===== -# Dev Secrets -# these must be set in .env when running scripts like legacy-importer -# ===== +# ----------------------------------------------------------------------------- +# Secrets (required for production, use dummy values for local dev) +# ----------------------------------------------------------------------------- +# IMPORTANT: Never commit real secrets. The values below are dummies for testing. -# GitHub personal access token for API requests when running scripts -GITHUB_TOKEN="ghp_your_personal_access_token" - -# ===== -# Prod Secrets -# these must be set in .env to run the production server and some scripts -# ===== - -# DigitalOcean Spaces credentials for S3-compatible storage -SPACES_KEY="digitalocean_spaces_key" -SPACES_SECRET="digitalocean_spaces_secret" - -# Pacchettibotti bot account credentials -# Used for automated registry operations (commits, releases, etc.) 
+# GitHub personal access token for pacchettibotti bot +# Used for: commits to registry repos, issue management PACCHETTIBOTTI_TOKEN="ghp_pacchettibotti_token" # Pacchettibotti SSH keys (base64-encoded) +# Used for: signing authenticated operations (unpublish, transfer) # Generate with: ssh-keygen -t ed25519 -C "pacchettibotti@purescript.org" # Encode with: cat key | base64 | tr -d '\n' PACCHETTIBOTTI_ED25519_PUB="c3NoLWVkMjU1MTkgYWJjeHl6IHBhY2NoZXR0aWJvdHRpQHB1cmVzY3JpcHQub3Jn" PACCHETTIBOTTI_ED25519="YWJjeHl6" + +# DigitalOcean Spaces credentials for S3-compatible storage +# Used for: uploading/downloading package tarballs +SPACES_KEY="digitalocean_spaces_key" +SPACES_SECRET="digitalocean_spaces_secret" + + +# ----------------------------------------------------------------------------- +# Script-only Secrets (not used by server, used by scripts like legacy-importer) +# ----------------------------------------------------------------------------- + +# Personal GitHub token for API requests when running scripts +# This is YOUR token, not pacchettibotti's +GITHUB_TOKEN="ghp_your_personal_access_token" diff --git a/app-e2e/spago.yaml b/app-e2e/spago.yaml index 1fa902f14..c19e78c42 100644 --- a/app-e2e/spago.yaml +++ b/app-e2e/spago.yaml @@ -5,12 +5,20 @@ package: dependencies: - aff - arrays + - codec-json - console - datetime - effect - either + - foldable-traversable + - json - maybe + - node-fs + - node-path + - node-process - prelude + - registry-app + - registry-foreign - registry-lib - registry-test-utils - spec diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs new file mode 100644 index 000000000..be9f3ba8f --- /dev/null +++ b/app-e2e/src/Test/E2E/GitHubIssue.purs @@ -0,0 +1,218 @@ +-- | End-to-end tests for the GitHubIssue workflow. +-- | These tests exercise the full flow: parsing a GitHub event, submitting to +-- | the registry API, polling for completion, and posting comments. 
+module Test.E2E.GitHubIssue (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.String as String +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import JSON as JSON +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Node.Process as Process +import Registry.App.GitHubIssue as GitHubIssue +import Registry.Foreign.Tmp as Tmp +import Registry.Operation (AuthenticatedData) +import Registry.Operation as Operation +import Registry.Test.E2E.Client as Client +import Registry.Test.E2E.Fixtures as Fixtures +import Registry.Test.E2E.WireMock (WireMockRequest) +import Registry.Test.E2E.WireMock as WireMock +import Test.Spec (Spec) +import Test.Spec as Spec + +spec :: Spec Unit +spec = do + Spec.describe "GitHubIssue end-to-end" do + Spec.before clearWireMockJournal do + + Spec.it "handles a publish via GitHub issue, posts comments, and closes issue on success" \_ -> do + result <- runWorkflowWithEvent $ mkGitHubPublishEvent Fixtures.effectPublishData + + assertJobSucceeded result + assertHasComment jobStartedText result + assertHasComment jobCompletedText result + assertIssueClosed result + + Spec.it "posts failure comment and leaves issue open when job fails" \_ -> do + result <- runWorkflowWithEvent $ mkGitHubAuthenticatedEventFrom "random-user" Fixtures.failingTransferData + + assertJobFailed result + assertHasComment jobStartedText result + assertHasComment jobFailedText result + assertNoComment jobCompletedText result + assertIssueOpen result + + Spec.it "re-signs authenticated operation for trustee (job fails due to unpublish time limit)" \_ -> do + result <- runWorkflowWithEvent $ mkGitHubAuthenticatedEvent Fixtures.trusteeAuthenticatedData + + assertHasComment jobStartedText result + assertTeamsApiCalled result + + where + clearWireMockJournal :: Aff Unit + clearWireMockJournal = do + wmConfig <- liftEffect WireMock.configFromEnv + 
WireMock.clearRequestsOrFail wmConfig + +testIssueNumber :: Int +testIssueNumber = 101 + +-- | Username configured as a packaging team member in test WireMock fixtures. +-- | See nix/test/config.nix for the GitHub Teams API stub. +packagingTeamUsername :: String +packagingTeamUsername = "packaging-team-user" + +jobStartedText :: String +jobStartedText = "Job started" + +jobCompletedText :: String +jobCompletedText = "Job completed successfully" + +jobFailedText :: String +jobFailedText = "Job failed" + +packagingTeamMembersPath :: String +packagingTeamMembersPath = "/orgs/purescript/teams/packaging/members" + +testPollConfig :: GitHubIssue.PollConfig +testPollConfig = + { maxAttempts: 60 + , interval: Milliseconds 500.0 + } + +githubEventCodec :: CJ.Codec { sender :: { login :: String }, issue :: { number :: Int, body :: String } } +githubEventCodec = CJ.named "GitHubEvent" $ CJ.Record.object + { sender: CJ.Record.object { login: CJ.string } + , issue: CJ.Record.object { number: CJ.int, body: CJ.string } + } + +mkGitHubPublishEvent :: Operation.PublishData -> String +mkGitHubPublishEvent publishData = + let + publishJson = JSON.print $ CJ.encode Operation.publishCodec publishData + body = "```json\n" <> publishJson <> "\n```" + event = { sender: { login: packagingTeamUsername }, issue: { number: testIssueNumber, body } } + in + JSON.print $ CJ.encode githubEventCodec event + +mkGitHubAuthenticatedEvent :: AuthenticatedData -> String +mkGitHubAuthenticatedEvent = mkGitHubAuthenticatedEventFrom packagingTeamUsername + +mkGitHubAuthenticatedEventFrom :: String -> AuthenticatedData -> String +mkGitHubAuthenticatedEventFrom username authData = + let + authJson = JSON.print $ CJ.encode Operation.authenticatedCodec authData + body = "```json\n" <> authJson <> "\n```" + event = { sender: { login: username }, issue: { number: testIssueNumber, body } } + in + JSON.print $ CJ.encode githubEventCodec event + +issuePath :: Int -> String +issuePath n = "/issues/" <> show n + 
+issueCommentsPath :: Int -> String +issueCommentsPath n = issuePath n <> "/comments" + +commentRequests :: Array WireMockRequest -> Array WireMockRequest +commentRequests = + WireMock.filterByMethod "POST" + >>> WireMock.filterByUrlContaining (issueCommentsPath testIssueNumber) + +closeRequests :: Array WireMockRequest -> Array WireMockRequest +closeRequests = + WireMock.filterByMethod "PATCH" + >>> WireMock.filterByUrlContaining (issuePath testIssueNumber) + +teamsRequests :: Array WireMockRequest -> Array WireMockRequest +teamsRequests = + WireMock.filterByMethod "GET" + >>> WireMock.filterByUrlContaining packagingTeamMembersPath + +bodyContains :: String -> WireMockRequest -> Boolean +bodyContains text r = fromMaybe false (String.contains (String.Pattern text) <$> r.body) + +hasComment :: String -> Array WireMockRequest -> Boolean +hasComment text = Array.any (bodyContains text) + +-- | Result of running the GitHubIssue workflow. +type RunResult = + { success :: Boolean + , requests :: Array WireMockRequest + } + +-- | Run the GitHub issue workflow with a given event JSON. +-- | Handles server check, temp file creation, env setup, and request capture. 
+runWorkflowWithEvent :: String -> Aff RunResult +runWorkflowWithEvent eventJson = do + -- Verify server is reachable + config <- liftEffect Client.configFromEnv + statusResult <- Client.getStatus config + case statusResult of + Left err -> Aff.throwError $ Aff.error $ "Server not reachable: " <> Client.printClientError err + Right _ -> pure unit + + -- Write event to temp file + tmpDir <- Tmp.mkTmpDir + let eventPath = Path.concat [ tmpDir, "github-event.json" ] + FS.Aff.writeTextFile UTF8 eventPath eventJson + liftEffect $ Process.setEnv "GITHUB_EVENT_PATH" eventPath + + -- Initialize and run workflow + envResult <- GitHubIssue.initializeGitHub + case envResult of + Nothing -> + Aff.throwError $ Aff.error "initializeGitHub returned Nothing" + Just env -> do + let testEnv = env { pollConfig = testPollConfig, logVerbosity = Quiet } + result <- GitHubIssue.runGitHubIssue testEnv + + -- Capture WireMock requests + wmConfig <- liftEffect WireMock.configFromEnv + requests <- WireMock.getRequestsOrFail wmConfig + + case result of + Left err -> + WireMock.failWithRequests ("runGitHubIssue failed: " <> err) requests + Right success -> + pure { success, requests } + +assertJobSucceeded :: RunResult -> Aff Unit +assertJobSucceeded { success, requests } = + unless success do + WireMock.failWithRequests "Job did not succeed" requests + +assertJobFailed :: RunResult -> Aff Unit +assertJobFailed { success, requests } = + when success do + WireMock.failWithRequests "Expected job to fail but it succeeded" requests + +assertHasComment :: String -> RunResult -> Aff Unit +assertHasComment text { requests } = + unless (hasComment text (commentRequests requests)) do + WireMock.failWithRequests ("Expected '" <> text <> "' comment but not found") requests + +assertNoComment :: String -> RunResult -> Aff Unit +assertNoComment text { requests } = + when (hasComment text (commentRequests requests)) do + WireMock.failWithRequests ("Did not expect '" <> text <> "' comment") requests + 
+assertIssueClosed :: RunResult -> Aff Unit +assertIssueClosed { requests } = + when (Array.null (closeRequests requests)) do + WireMock.failWithRequests "Expected issue to be closed, but no close request was made" requests + +assertIssueOpen :: RunResult -> Aff Unit +assertIssueOpen { requests } = + unless (Array.null (closeRequests requests)) do + WireMock.failWithRequests "Expected issue to remain open, but a close request was made" requests + +assertTeamsApiCalled :: RunResult -> Aff Unit +assertTeamsApiCalled { requests } = + when (Array.null (teamsRequests requests)) do + WireMock.failWithRequests "Expected GitHub Teams API to be called, but no such request was seen" requests diff --git a/app-e2e/src/Test/E2E/Main.purs b/app-e2e/src/Test/E2E/Main.purs index 7bc030d76..bbd7f3212 100644 --- a/app-e2e/src/Test/E2E/Main.purs +++ b/app-e2e/src/Test/E2E/Main.purs @@ -5,6 +5,7 @@ import Prelude import Data.Maybe (Maybe(..)) import Data.Time.Duration (Milliseconds(..)) import Effect (Effect) +import Test.E2E.GitHubIssue as Test.E2E.GitHubIssue import Test.E2E.Publish as Test.E2E.Publish import Test.Spec as Spec import Test.Spec.Reporter.Console (consoleReporter) @@ -15,6 +16,7 @@ main :: Effect Unit main = runSpecAndExitProcess' config [ consoleReporter ] do Spec.describe "E2E Tests" do Spec.describe "Publish" Test.E2E.Publish.spec + Spec.describe "GitHubIssue" Test.E2E.GitHubIssue.spec where config = { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 120_000.0 } diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index f7bd1d63e..4168e1610 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -6,16 +6,16 @@ import Prelude import Data.Array as Array import Data.Either (Either(..)) +import Data.Foldable (for_) import Data.Maybe (Maybe(..), isJust) import Data.String as String import Effect.Aff (Aff) import Effect.Class (liftEffect) import Effect.Class.Console as Console import 
Registry.API.V1 as V1 -import Registry.Location as Registry.Location import Registry.Test.Assert as Assert import Registry.Test.E2E.Client as Client -import Registry.Test.Utils as Utils +import Registry.Test.E2E.Fixtures as Fixtures import Test.Spec (Spec) import Test.Spec as Spec @@ -41,25 +41,11 @@ spec = do Right _ -> pure unit -- Jobs list may not be empty if other tests ran Spec.describe "Publish workflow" do - Spec.it "can publish effect@4.0.0" do + Spec.it "can publish effect@4.0.0 and filter logs" do config <- getConfig - let - -- Location must match what's in the fixture metadata - effectLocation = Registry.Location.GitHub - { owner: "purescript" - , repo: "purescript-effect" - , subdir: Nothing - } - publishData = - { name: Utils.unsafePackageName "effect" - , location: Just effectLocation - , ref: "v4.0.0" - , compiler: Utils.unsafeVersion "0.15.9" - , resolutions: Nothing - } -- Submit publish request - publishResult <- Client.publish config publishData + publishResult <- Client.publish config Fixtures.effectPublishData case publishResult of Left err -> Assert.fail $ "Failed to submit publish request: " <> Client.printClientError err Right { jobId } -> do @@ -67,18 +53,52 @@ spec = do job <- Client.pollJob config jobId -- If job failed, print logs for debugging - unless job.success do + unless (V1.jobInfo job).success do Console.log "Job failed! 
Logs:" - let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) job.logs + let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) (V1.jobInfo job).logs Console.log $ String.joinWith "\n" logMessages -- Verify job completed successfully - when (not job.success) do - let errorLogs = Array.filter (\l -> l.level == V1.Error) job.logs + when (not (V1.jobInfo job).success) do + let errorLogs = Array.filter (\l -> l.level == V1.Error) (V1.jobInfo job).logs let errorMessages = map _.message errorLogs Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages - Assert.shouldSatisfy job.finishedAt isJust - Assert.shouldEqual job.jobType V1.PublishJob - Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") - Assert.shouldEqual job.ref "v4.0.0" + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + -- Test log level filtering + allLogsResult <- Client.getJob config jobId (Just V1.Debug) Nothing + case allLogsResult of + Left err -> Assert.fail $ "Failed to get job with DEBUG level: " <> Client.printClientError err + Right allLogsJob -> do + let allLogs = (V1.jobInfo allLogsJob).logs + + infoLogsResult <- Client.getJob config jobId (Just V1.Info) Nothing + case infoLogsResult of + Left err -> Assert.fail $ "Failed to get job with INFO level: " <> Client.printClientError err + Right infoLogsJob -> do + let infoLogs = (V1.jobInfo infoLogsJob).logs + let debugOnlyLogs = Array.filter (\l -> l.level == V1.Debug) allLogs + + -- INFO logs should not contain any DEBUG logs + let infoContainsDebug = Array.any (\l -> l.level == V1.Debug) infoLogs + when infoContainsDebug do + Assert.fail "INFO level filter returned DEBUG logs" + + -- If there were DEBUG logs, INFO result should be smaller + when (Array.length debugOnlyLogs > 0) do + Assert.shouldSatisfy (Array.length infoLogs) (_ < Array.length allLogs) + + -- Test timestamp filtering + let logs = (V1.jobInfo job).logs + when (Array.length 
logs >= 2) do + case Array.index logs 0 of + Nothing -> pure unit + Just firstLog -> do + sinceResult <- Client.getJob config jobId (Just V1.Debug) (Just firstLog.timestamp) + case sinceResult of + Left err -> Assert.fail $ "Failed to get job with since filter: " <> Client.printClientError err + Right sinceJob -> do + let sinceLogs = (V1.jobInfo sinceJob).logs + for_ sinceLogs \l -> + Assert.shouldSatisfy l.timestamp (_ >= firstLog.timestamp) diff --git a/app/fixtures/addition_issue_created.json b/app/fixtures/addition_issue_created.json index d0b205555..b0aa93e6c 100644 --- a/app/fixtures/addition_issue_created.json +++ b/app/fixtures/addition_issue_created.json @@ -5,7 +5,7 @@ "assignee": null, "assignees": [], "author_association": "CONTRIBUTOR", - "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", + "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"version\": \"5.0.0\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", "closed_at": null, "comments": 0, "comments_url": "https://api.github.com/repos/purescript/registry/issues/149/comments", diff --git a/app/fixtures/update_issue_comment.json b/app/fixtures/update_issue_comment.json index 5400a7c2e..c5673c4da 100644 --- a/app/fixtures/update_issue_comment.json +++ b/app/fixtures/update_issue_comment.json @@ -2,7 +2,7 @@ "action": "created", "comment": { "author_association": "MEMBER", - "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", + "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"version\": \"1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", "created_at": "2021-03-09T02:03:56Z", "html_url": 
"https://github.com/purescript/registry/issues/43#issuecomment-793265839", "id": 793265839, diff --git a/app/spago.yaml b/app/spago.yaml index be3c3bec6..03a600425 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 89322d52b..c5d174e94 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -9,11 +9,10 @@ module Registry.App.API , copyPackageSourceFiles , findAllCompilers , formatPursuitResolutions - , installBuildPlan , packageSetUpdate + , packageSetUpdate2 , packagingTeam , publish - , readCompilerIndex , removeIgnoredTarballFiles ) where @@ -31,7 +30,7 @@ import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList import Data.Map (SemigroupMap(..)) import Data.Map as Map -import Data.Newtype (over, unwrap) +import Data.Newtype (over) import Data.Number.Format as Number.Format import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet @@ -59,8 +58,6 @@ import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar import Registry.App.Effect.Cache (class FsEncodable, Cache) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) @@ -83,6 +80,8 @@ import Registry.App.Legacy.Manifest (LEGACY_CACHE) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.SQLite (PackageSetJobDetails) +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants (ignoredDirectories, 
ignoredFiles, ignoredGlobs, includedGlobs, includedInsensitiveGlobs) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.FastGlob as FastGlob @@ -114,7 +113,12 @@ import Run.Except (EXCEPT) import Run.Except as Except import Safe.Coerce as Safe.Coerce -type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + LOG + EXCEPT String + r) + +packageSetUpdate2 :: forall r. PackageSetJobDetails -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate2 {} = do + -- TODO: have github call into this + pure unit -- | Process a package set update. Package set updates are only processed via -- | GitHub and not the HTTP API, so they require access to the GitHub env. @@ -222,18 +226,18 @@ packageSetUpdate payload = do Except.throw "No packages in the suggested batch can be processed (all failed validation checks) and the compiler version was not upgraded, so there is no upgrade to perform." let changeSet = candidates.accepted <#> maybe Remove Update - Comment.comment "Attempting to build package set update." + Log.notice "Attempting to build package set update." PackageSets.upgradeAtomic latestPackageSet (fromMaybe prevCompiler payload.compiler) changeSet >>= case _ of Left error -> Except.throw $ "The package set produced from this suggested update does not compile:\n\n" <> error Right packageSet -> do let commitMessage = PackageSets.commitMessage latestPackageSet changeSet (un PackageSet packageSet).version Registry.writePackageSet packageSet commitMessage - Comment.comment "Built and released a new package set! Now mirroring to the package-sets repo..." + Log.notice "Built and released a new package set! Now mirroring to the package-sets repo..." Registry.mirrorPackageSet packageSet - Comment.comment "Mirrored a new legacy package set." + Log.notice "Mirrored a new legacy package set." 
-type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + LOG + EXCEPT String + AFF + EFFECT + r) -- | Run an authenticated package operation, ie. an unpublish or a transfer. authenticated :: forall r. AuthenticatedData -> Run (AuthenticatedEffects + r) Unit @@ -293,7 +297,7 @@ authenticated auth = case auth.payload of Storage.delete payload.name payload.version Registry.writeMetadata payload.name updated Registry.deleteManifest payload.name payload.version - Comment.comment $ "Unpublished " <> formatted <> "!" + Log.notice $ "Unpublished " <> formatted <> "!" Transfer payload -> do Log.debug $ "Processing authorized transfer operation with payload: " <> stringifyJson Operation.authenticatedCodec auth @@ -324,11 +328,11 @@ authenticated auth = case auth.payload of Log.debug $ "Successfully authenticated ownership of " <> PackageName.print payload.name <> ", transferring..." let updated = metadata # over Metadata _ { location = payload.newLocation } Registry.writeMetadata payload.name updated - Comment.comment "Successfully transferred your package!" + Log.notice "Successfully transferred your package!" Registry.mirrorLegacyRegistry payload.name payload.newLocation - Comment.comment "Mirrored registry operation to the legacy registry." + Log.notice "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. 
If the package has not been -- | published before then it will be registered and the given version will be @@ -338,7 +342,7 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- The legacyIndex argument contains the unverified manifests produced by the -- legacy importer; these manifests can be used on legacy packages to conform -- them to the registry rule that transitive dependencies are not allowed. -publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe { dependencies :: Map PackageName Range, version :: Version }) publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name @@ -444,13 +448,13 @@ publish maybeLegacyIndex payload = do pure manifest else if hasSpagoYaml then do - Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." + Log.notice $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." SpagoYaml.readSpagoYaml packageSpagoYaml >>= case _ of Left readErr -> Except.throw $ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:\n" <> readErr Right config -> case SpagoYaml.spagoYamlToManifest config of Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -459,7 +463,7 @@ publish maybeLegacyIndex payload = do pure manifest else do - Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." 
+ Log.notice $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." @@ -475,7 +479,7 @@ publish maybeLegacyIndex payload = do Right legacyManifest -> do Log.debug $ "Successfully produced a legacy manifest from the package source." let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -550,22 +554,23 @@ publish maybeLegacyIndex payload = do ] Nothing | payload.compiler < Purs.minPursuitPublish -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish , ". Please try with a later compiler." ] + pure Nothing Nothing -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." 
] - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan verifiedResolutions installedResolutions + MatrixBuilder.installBuildPlan verifiedResolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -573,7 +578,7 @@ publish maybeLegacyIndex payload = do } case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." Right _ -> do @@ -589,22 +594,23 @@ publish maybeLegacyIndex payload = do Left publishErr -> Except.throw publishErr Right _ -> do FS.Extra.remove tmp - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" + pure Nothing -- In this case the package version has not been published, so we proceed -- with ordinary publishing. Nothing -> do Log.info "Verifying the package build plan..." - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions - Comment.comment "Verifying unused and/or missing dependencies..." + Log.notice "Verifying unused and/or missing dependencies..." 
-- First we install the resolutions and call 'purs graph' to adjust the -- manifest as needed, but we defer compilation until after this check -- in case the package manifest and resolutions are adjusted. let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan validatedResolutions installedResolutions + MatrixBuilder.installBuildPlan validatedResolutions installedResolutions let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ] let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ] @@ -687,7 +693,7 @@ publish maybeLegacyIndex payload = do -- Now that we have the package source contents we can verify we can compile -- the package with exactly what is going to be uploaded. - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Verifying package compiles using compiler " , Version.print payload.compiler , " and resolutions:\n" @@ -699,7 +705,7 @@ publish maybeLegacyIndex payload = do -- We clear the installation directory so that no old installed resolutions -- stick around. Run.liftAff $ FS.Extra.remove installedResolutions - installBuildPlan resolutions installedResolutions + MatrixBuilder.installBuildPlan resolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -708,11 +714,11 @@ publish maybeLegacyIndex payload = do case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error Right _ -> pure unit - Comment.comment "Package source is verified! Packaging tarball and uploading to the storage backend..." + Log.notice "Package source is verified! 
Packaging tarball and uploading to the storage backend..." let tarballName = packageDirname <> ".tar.gz" let tarballPath = Path.concat [ tmp, tarballName ] Tar.create { cwd: tmp, folderName: packageDirname } @@ -723,7 +729,7 @@ publish maybeLegacyIndex payload = do Operation.Validation.ExceedsMaximum maxPackageBytes -> Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." Operation.Validation.WarnPackageSize maxWarnBytes -> - Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." + Log.notice $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." -- If a package has under ~30 bytes it's about guaranteed that packaging the -- tarball failed. This can happen if the system running the API has a non- @@ -742,7 +748,7 @@ publish maybeLegacyIndex payload = do let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) - Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" + Log.notice "Successfully uploaded package to the registry! 🎉 🚀" -- We write to the registry index if possible. If this fails, the packaging -- team should manually insert the entry. @@ -750,7 +756,7 @@ publish maybeLegacyIndex payload = do Registry.writeManifest manifest Registry.mirrorLegacyRegistry payload.name newMetadata.location - Comment.comment "Mirrored registry operation to the legacy registry!" + Log.notice "Mirrored registry operation to the legacy registry!" 
Log.debug "Uploading package documentation to Pursuit" if payload.compiler >= Purs.minPursuitPublish then @@ -760,38 +766,45 @@ publish maybeLegacyIndex payload = do publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of Left publishErr -> do Log.error publishErr - Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr + Log.notice $ "Failed to publish package docs to Pursuit: " <> publishErr Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" else do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" , Version.print payload.compiler , "). If you want to publish documentation, please try again with a later compiler." ] - Comment.comment "Determining all valid compiler versions for this package..." - allCompilers <- PursVersions.pursVersions - { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of - Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } - Just try -> do - found <- findAllCompilers - { source: packageSource - , manifest - , compilers: try - } - pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + -- Note: this only runs for the Legacy Importer. In daily circumstances (i.e. + -- when running the server) this will be taken care of by followup jobs invoking + -- the MatrixBuilder for each compiler version + for_ maybeLegacyIndex \_idx -> do + Log.notice "Determining all valid compiler versions for this package..." 
+ allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - unless (Map.isEmpty invalidCompilers) do - Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) + Log.notice $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) - Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Log.notice "Wrote completed metadata to the registry!" 
- Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp + pure $ Just { dependencies: (un Manifest manifest).dependencies, version: (un Manifest manifest).version } -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -876,32 +889,30 @@ findAllCompilers . { source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult findAllCompilers { source, manifest, compilers } = do - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of Left solverErrors -> do Log.info $ "Failed to solve with compiler " <> Version.print target pure $ Left $ Tuple target (Left solverErrors) - Right (Tuple mbCompiler resolutions) -> do + Right (Tuple compiler resolutions) -> do Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions - case mbCompiler of - Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" - Just selected | selected /= target -> Except.throw $ Array.fold + when (compiler /= target) do + Except.throw $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" - , Version.print selected + , Version.print compiler , ") that differs from the target compiler (" , Version.print target , ")." 
] - Just _ -> pure unit Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of Nothing -> do Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] FS.Extra.ensureDirectory installed - installBuildPlan resolutions installed + MatrixBuilder.installBuildPlan resolutions installed result <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } , version: Just target @@ -910,7 +921,7 @@ findAllCompilers { source, manifest, compilers } = do FS.Extra.remove workdir case result of Left err -> do - Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err + Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> MatrixBuilder.printCompilerFailure target err Right _ -> do Log.debug $ "Compilation succeeded with compiler " <> Version.print target Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } @@ -921,49 +932,6 @@ findAllCompilers { source, manifest, compilers } = do let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } -printCompilerFailure :: Version -> CompilerFailure -> String -printCompilerFailure compiler = case _ of - MissingCompiler -> Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." 
- ] - CompilationError errs -> String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - UnknownError err -> String.joinWith "\n" - [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" - , "```" - , err - , "```" - ] - --- | Install all dependencies indicated by the build plan to the specified --- | directory. Packages will be installed at 'dir/package-name-x.y.z'. -installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit -installBuildPlan resolutions dependenciesDir = do - Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir - -- We fetch every dependency at its resolved version, unpack the tarball, and - -- store the resulting source code in a specified directory for dependencies. - forWithIndex_ resolutions \name version -> do - let - -- This filename uses the format the directory name will have once - -- unpacked, ie. package-name-major.minor.patch - filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" - filepath = Path.concat [ dependenciesDir, filename ] - Storage.download name version filepath - Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of - Left error -> do - Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error - Except.throw "Failed to unpack dependency tarball, cannot continue." - Right _ -> - Log.debug $ "Unpacked " <> filename - Run.liftAff $ FS.Aff.unlink filepath - Log.debug $ "Installed " <> formatPackageVersion name version - -- | Parse the name and version from a path to a module installed in the standard -- | form: '-...' parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version } @@ -999,7 +967,7 @@ type PublishToPursuit = publishToPursuit :: forall r . 
PublishToPursuit - -> Run (PURSUIT + COMMENT + LOG + AFF + EFFECT + r) (Either String Unit) + -> Run (PURSUIT + LOG + AFF + EFFECT + r) (Either String Unit) publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do Log.debug "Generating a resolutions file" tmp <- Tmp.mkTmpDir @@ -1034,7 +1002,7 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = Excep publishJson <- case compilerOutput of Left error -> - Except.throw $ printCompilerFailure compiler error + Except.throw $ MatrixBuilder.printCompilerFailure compiler error Right publishResult -> do -- The output contains plenty of diagnostic lines, ie. "Compiling ..." -- but we only want the final JSON payload. @@ -1181,13 +1149,6 @@ getPacchettiBotti = do packagingTeam :: Team packagingTeam = { org: "purescript", team: "packaging" } -readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex -readCompilerIndex = do - metadata <- Registry.readAllMetadata - manifests <- Registry.readAllManifests - allCompilers <- PursVersions.pursVersions - pure $ Solver.buildCompilerIndex allCompilers manifests metadata - type AdjustManifest = { source :: FilePath , compiler :: Version @@ -1207,7 +1168,7 @@ conformLegacyManifest -> CompilerIndex -> Solver.TransitivizedRegistry -> ValidateDepsError - -> Run (COMMENT + LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) + -> Run (LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do let manifestRequired :: SemigroupMap PackageName Intersection @@ -1304,7 +1265,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p UnusedDependencies names -> do Tuple deps resolutions <- fixUnused names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ 
previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1313,7 +1274,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p MissingDependencies names -> do Tuple deps resolutions <- fixMissing names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1324,7 +1285,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p let trimmed = Map.difference manifest.dependencies unused' Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies = trimmed })) let newManifest = Manifest (manifest { dependencies = newDeps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" diff --git a/app/src/App/Effect/Comment.purs b/app/src/App/Effect/Comment.purs deleted file mode 100644 index 848a1b3ae..000000000 --- a/app/src/App/Effect/Comment.purs +++ /dev/null @@ -1,68 +0,0 @@ --- | An effect for notifying users of important events in the application, such --- | as failures that prevent their package from being uploaded, or successful --- | events that indicate progress. --- | --- | This is not a general logging effect. For that, you should use the Log --- | effect. This effect should be used sparingly to notify registry users of --- | events with formatted, human-readable messages providing context. 
-module Registry.App.Effect.Comment where - -import Registry.App.Prelude - -import Ansi.Codes (GraphicsParam) -import Data.Int as Int -import Dodo (Doc) -import Dodo as Dodo -import Dodo.Ansi as Ansi -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.Foreign.Octokit (Address, IssueNumber(..), Octokit) -import Registry.Foreign.Octokit as Octokit -import Run (AFF, EFFECT, Run) -import Run as Run - -data Comment a = Comment (Doc GraphicsParam) a - -derive instance Functor Comment - --- | An effect for notifying consumers of important events in the application -type COMMENT r = (comment :: Comment | r) - -_comment :: Proxy "comment" -_comment = Proxy - -comment :: forall a r. Log.Loggable a => a -> Run (COMMENT + r) Unit -comment message = Run.lift _comment (Comment (Log.toLog message) unit) - -interpret :: forall r a. (Comment ~> Run r) -> Run (COMMENT + r) a -> Run r a -interpret handler = Run.interpret (Run.on _comment handler Run.send) - --- | Handle a notification by converting it to an info-level LOG -handleLog :: forall a r. Comment a -> Run (LOG + r) a -handleLog = case _ of - Comment message next -> do - Log.info $ Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTIFY] ") <> message - pure next - -type CommentGitHubEnv = - { octokit :: Octokit - , issue :: IssueNumber - , registry :: Address - } - --- | Handle a notification by commenting on the relevant GitHub issue. -handleGitHub :: forall a r. 
CommentGitHubEnv -> Comment a -> Run (LOG + AFF + EFFECT + r) a -handleGitHub env = case _ of - Comment message next -> do - let issueNumber = Int.toStringAs Int.decimal $ un IssueNumber env.issue - Log.debug $ "Commenting via a GitHub comment on issue " <> issueNumber - handleLog (Comment message unit) - let body = Dodo.print Dodo.plainText Dodo.twoSpaces (Log.toLog message) - let request = Octokit.createCommentRequest { address: env.registry, issue: env.issue, body } - Octokit.request env.octokit request >>= case _ of - Left error -> do - Log.error $ "Could not send comment to GitHub due to an unexpected error." - Log.debug $ Octokit.printGitHubError error - Right _ -> - Log.debug $ "Created GitHub comment on issue " <> issueNumber - pure next diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..031c91a62 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -5,13 +5,30 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.String as String -import Registry.API.V1 (JobId, LogLevel, LogLine) +import Registry.API.V1 (Job, JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite + ( FinishJob + , InsertMatrixJob + , InsertPackageSetJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , MatrixJobDetails + , PackageSetJobDetails + , PublishJobDetails + , SQLite + , SelectJobRequest + , StartJob + , TransferJobDetails + , UnpublishJobDetails + ) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. 
Also, for now these -- simply lift their Effect-based equivalents in the SQLite module, but ideally @@ -21,13 +38,24 @@ import Run as Run -- Also, this does not currently include setup and teardown (those are handled -- outside the effect), but we may wish to add those in the future if they'll -- be part of app code we want to test. + data Db a - = InsertLog LogLine a - | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | CreateJob NewJob a - | FinishJob JobResult a - | SelectJob JobId (Either String SQLite.Job -> a) - | RunningJobForPackage PackageName (Either String SQLite.Job -> a) + = InsertPublishJob InsertPublishJob (JobId -> a) + | InsertUnpublishJob InsertUnpublishJob (JobId -> a) + | InsertTransferJob InsertTransferJob (JobId -> a) + | InsertMatrixJob InsertMatrixJob (JobId -> a) + | InsertPackageSetJob InsertPackageSetJob (JobId -> a) + | FinishJob FinishJob a + | StartJob StartJob a + | SelectJob SelectJobRequest (Either String (Maybe Job) -> a) + | SelectNextPublishJob (Either String (Maybe PublishJobDetails) -> a) + | SelectNextUnpublishJob (Either String (Maybe UnpublishJobDetails) -> a) + | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a) + | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) + | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) + | InsertLogLine LogLine a + | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a) + | ResetIncompleteJobs a derive instance Functor Db @@ -39,28 +67,67 @@ _db = Proxy -- | Insert a new log line into the database. insertLog :: forall r. LogLine -> Run (DB + r) Unit -insertLog log = Run.lift _db (InsertLog log unit) +insertLog log = Run.lift _db (InsertLogLine log unit) --- | Select all logs for a given job, filtered by loglevel and a time cutoff. -selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) +-- | Select all logs for a given job, filtered by loglevel. 
+selectLogsByJob :: forall r. JobId -> LogLevel -> DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) --- | Create a new job in the database. -createJob :: forall r. NewJob -> Run (DB + r) Unit -createJob newJob = Run.lift _db (CreateJob newJob unit) - -- | Set a job in the database to the 'finished' state. -finishJob :: forall r. JobResult -> Run (DB + r) Unit -finishJob jobResult = Run.lift _db (FinishJob jobResult unit) +finishJob :: forall r. FinishJob -> Run (DB + r) Unit +finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJob :: forall r. JobId -> Run (DB + r) (Either String SQLite.Job) -selectJob jobId = Run.lift _db (SelectJob jobId identity) +selectJob :: forall r. SelectJobRequest -> Run (DB + EXCEPT String + r) (Maybe Job) +selectJob request = Run.lift _db (SelectJob request identity) >>= Except.rethrow + +-- | Insert a new publish job into the database. +insertPublishJob :: forall r. InsertPublishJob -> Run (DB + r) JobId +insertPublishJob job = Run.lift _db (InsertPublishJob job identity) + +-- | Insert a new unpublish job into the database. +insertUnpublishJob :: forall r. InsertUnpublishJob -> Run (DB + r) JobId +insertUnpublishJob job = Run.lift _db (InsertUnpublishJob job identity) + +-- | Insert a new transfer job into the database. +insertTransferJob :: forall r. InsertTransferJob -> Run (DB + r) JobId +insertTransferJob job = Run.lift _db (InsertTransferJob job identity) + +-- | Insert a new matrix job into the database. +insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId +insertMatrixJob job = Run.lift _db (InsertMatrixJob job identity) + +-- | Insert a new package set job into the database. +insertPackageSetJob :: forall r. 
InsertPackageSetJob -> Run (DB + r) JobId +insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity) --- | Select a job by package name from the database, failing if there is no --- | current job available for that package name. -runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job) -runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity) +-- | Start a job in the database. +startJob :: forall r. StartJob -> Run (DB + r) Unit +startJob job = Run.lift _db (StartJob job unit) + +-- | Select the next publish job from the database. +selectNextPublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PublishJobDetails) +selectNextPublishJob = Run.lift _db (SelectNextPublishJob identity) >>= Except.rethrow + +-- | Select the next unpublish job from the database. +selectNextUnpublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails) +selectNextUnpublishJob = Run.lift _db (SelectNextUnpublishJob identity) >>= Except.rethrow + +-- | Select the next transfer job from the database. +selectNextTransferJob :: forall r. Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) +selectNextTransferJob = Run.lift _db (SelectNextTransferJob identity) >>= Except.rethrow + +-- | Select the next matrix job from the database. +selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) +selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow + +-- | Select the next package set job from the database. +selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow + +-- | Delete all incomplete jobs from the database. +resetIncompleteJobs :: forall r. Run (DB + r) Unit +resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit) interpret :: forall r a. 
(Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -70,28 +137,68 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertLog log next -> do - Run.liftEffect $ SQLite.insertLog env.db log - pure next + InsertPublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPublishJob env.db job + pure $ reply result - SelectLogsByJob jobId logLevel since reply -> do - logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since - unless (Array.null logs.fail) do - Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail - pure $ reply logs.success + InsertUnpublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertUnpublishJob env.db job + pure $ reply result + + InsertTransferJob job reply -> do + result <- Run.liftEffect $ SQLite.insertTransferJob env.db job + pure $ reply result + + InsertMatrixJob job reply -> do + result <- Run.liftEffect $ SQLite.insertMatrixJob env.db job + pure $ reply result + + InsertPackageSetJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPackageSetJob env.db job + pure $ reply result - CreateJob newJob next -> do - Run.liftEffect $ SQLite.createJob env.db newJob + FinishJob job next -> do + Run.liftEffect $ SQLite.finishJob env.db job pure next - FinishJob jobResult next -> do - Run.liftEffect $ SQLite.finishJob env.db jobResult + StartJob job next -> do + Run.liftEffect $ SQLite.startJob env.db job + pure next + + SelectJob request reply -> do + result <- Run.liftEffect $ SQLite.selectJob env.db request + pure $ reply result + + SelectNextPublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPublishJob env.db + pure $ reply result + + SelectNextUnpublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextUnpublishJob env.db + pure $ 
reply result + + SelectNextTransferJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextTransferJob env.db + pure $ reply result + + SelectNextMatrixJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db + pure $ reply result + + SelectNextPackageSetJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db + pure $ reply result + + InsertLogLine log next -> do + Run.liftEffect $ SQLite.insertLogLine env.db log pure next - SelectJob jobId reply -> do - job <- Run.liftEffect $ SQLite.selectJob env.db jobId - pure $ reply job + SelectLogsByJob jobId logLevel since reply -> do + { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since + unless (Array.null fail) do + Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail + pure $ reply success - RunningJobForPackage name reply -> do - job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name - pure $ reply job + ResetIncompleteJobs next -> do + Run.liftEffect $ SQLite.resetIncompleteJobs env.db + pure next diff --git a/app/src/App/Effect/Env.purs b/app/src/App/Effect/Env.purs index e832d4b84..cd7880602 100644 --- a/app/src/App/Effect/Env.purs +++ b/app/src/App/Effect/Env.purs @@ -30,6 +30,7 @@ type ResourceEnv = , s3BucketUrl :: URL , githubApiUrl :: URL , pursuitApiUrl :: URL + , registryApiUrl :: URL , healthchecksUrl :: Maybe URL } @@ -55,6 +56,7 @@ lookupResourceEnv = do s3BucketUrlEnv <- lookupWithDefault s3BucketUrl productionS3BucketUrl githubApiUrlEnv <- lookupWithDefault githubApiUrl productionGitHubApiUrl pursuitApiUrlEnv <- lookupWithDefault pursuitApiUrl productionPursuitApiUrl + registryApiUrlEnv <- lookupWithDefault registryApiUrl productionRegistryApiUrl -- Optional - if not set, healthcheck pinging is disabled healthchecksUrlEnv <- lookupOptional healthchecksUrl @@ -65,6 +67,7 @@ lookupResourceEnv = do , s3BucketUrl: s3BucketUrlEnv , githubApiUrl: githubApiUrlEnv , pursuitApiUrl: 
pursuitApiUrlEnv + , registryApiUrl: registryApiUrlEnv , healthchecksUrl: healthchecksUrlEnv } @@ -209,6 +212,12 @@ githubApiUrl = EnvKey { key: "GITHUB_API_URL", decode: pure } pursuitApiUrl :: EnvKey URL pursuitApiUrl = EnvKey { key: "PURSUIT_API_URL", decode: pure } +-- | Override for the Registry API URL. +-- | If not set, uses productionRegistryApiUrl. +-- | Set this to point to the local server during testing. +registryApiUrl :: EnvKey URL +registryApiUrl = EnvKey { key: "REGISTRY_API_URL", decode: pure } + -- Production URL defaults (only used by the app, not exposed to library users) -- | The URL of the package storage backend (S3-compatible) @@ -227,6 +236,10 @@ productionGitHubApiUrl = "https://api.github.com" productionPursuitApiUrl :: URL productionPursuitApiUrl = "https://pursuit.purescript.org" +-- | The Registry API base URL +productionRegistryApiUrl :: URL +productionRegistryApiUrl = "https://registry.purescript.org/api" + -- | The URL of the health checks endpoint. -- | Optional - if not set, healthcheck pinging is disabled. healthchecksUrl :: EnvKey URL diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index 6fc4b31b6..b99af947d 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -1,6 +1,6 @@ -- | A general logging effect suitable for recording events as they happen in --- | the application, including debugging logs. Should not be used to report --- | important events to registry users; for that, use the Comment effect. +-- | the application, including debugging logs. Use the `notice` level to report +-- | important events to registry users (these are posted as GitHub comments). module Registry.App.Effect.Log where import Registry.App.Prelude @@ -65,6 +65,9 @@ info = log Info <<< toLog warn :: forall a r. Loggable a => a -> Run (LOG + r) Unit warn = log Warn <<< toLog +notice :: forall a r. Loggable a => a -> Run (LOG + r) Unit +notice = log Notice <<< toLog + error :: forall a r. 
Loggable a => a -> Run (LOG + r) Unit error = log Error <<< toLog @@ -80,6 +83,7 @@ handleTerminal verbosity = case _ of Debug -> Ansi.foreground Ansi.Blue message Info -> message Warn -> Ansi.foreground Ansi.Yellow (Dodo.text "[WARNING] ") <> message + Notice -> Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTICE] ") <> message Error -> Ansi.foreground Ansi.Red (Dodo.text "[ERROR] ") <> message Run.liftEffect case verbosity of @@ -134,5 +138,5 @@ handleDb env = case _ of let msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message) row = { timestamp, level, jobId: env.job, message: msg } - Run.liftEffect $ SQLite.insertLog env.db row + Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 56422ab64..e3eb353aa 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -1,3 +1,12 @@ +-- | A thin client that proxies GitHub issue operations to the registry API server. +-- | +-- | When a GitHub issue is created or commented on in the purescript/registry repo, +-- | this module: +-- | 1. Parses the issue body to determine the operation type +-- | 2. Re-signs authenticated operations with pacchettibotti keys if submitted by a trustee +-- | 3. POSTs the operation to the registry API server +-- | 4. Polls for job completion, posting logs as GitHub comments +-- | 5. 
Closes the issue on success module Registry.App.GitHubIssue where import Registry.App.Prelude @@ -5,121 +14,246 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array import Data.Codec.JSON as CJ -import Data.Foldable (traverse_) +import Data.DateTime (DateTime) +import Data.Formatter.DateTime as DateTime import Data.String as String import Effect.Aff as Aff import Effect.Class.Console as Console -import Effect.Ref as Ref +import Fetch (Method(..)) +import Fetch as Fetch import JSON as JSON import JSON.Object as CJ.Object import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process +import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Auth as Auth -import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV) +import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.Effect.PackageSets as PackageSets -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.Constants as Constants -import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.JsonRepair as JsonRepair import Registry.Foreign.Octokit (GitHubToken, IssueNumber(..), Octokit) import Registry.Foreign.Octokit as Octokit -import Registry.Foreign.S3 (SpaceKey) -import Registry.Operation (AuthenticatedData, PackageOperation(..), PackageSetOperation(..)) +import Registry.Internal.Format as Internal.Format +import Registry.Operation 
(AuthenticatedData, AuthenticatedPackageOperation(..), PackageOperation(..), PackageSetOperation(..)) import Registry.Operation as Operation -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except main :: Effect Unit main = launchAff_ $ do - -- For now we only support GitHub events, and no formal API, so we'll jump - -- straight into the GitHub event workflow. - initializeGitHub >>= traverse_ \env -> do - let - run = case env.operation of - Left packageSetOperation -> case packageSetOperation of - PackageSetUpdate payload -> - API.packageSetUpdate payload - - Right packageOperation -> case packageOperation of - Publish payload -> - API.publish Nothing payload - Authenticated payload -> do - -- If we receive an authenticated operation via GitHub, then we - -- re-sign it with pacchettibotti credentials if and only if the - -- operation was opened by a trustee. - signed <- signPacchettiBottiIfTrustee payload - API.authenticated signed - - -- Caching - let cache = Path.concat [ scratchDir, ".cache" ] - FS.Extra.ensureDirectory cache - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - -- Registry env - debouncer <- Registry.newDebouncer - let - registryEnv :: Registry.RegistryEnv - registryEnv = - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.token) - , workdir: scratchDir - , debouncer - , cacheRef: registryCacheRef - } - - -- Package sets - let workdir = Path.concat [ scratchDir, "package-sets-work" ] - FS.Extra.ensureDirectory workdir + initializeGitHub >>= case _ of + Nothing -> pure unit + Just env -> do + result <- runGitHubIssue env + case result of + Left err -> do + -- Post error as comment and exit with failure + void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "❌ " <> err + } + 
liftEffect $ Process.exit' 1 + Right _ -> + -- Issue closing is handled inside runGitHubIssue + pure unit - thrownRef <- liftEffect $ Ref.new false +runGitHubIssue :: GitHubEventEnv -> Aff (Either String Boolean) +runGitHubIssue env = do + let cache = Path.concat [ scratchDir, ".cache" ] + githubCacheRef <- Cache.newCacheRef - run - -- App effects - # PackageSets.interpret (PackageSets.handle { workdir }) - # Registry.interpret (Registry.handle registryEnv) - # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) - # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret (Source.handle Source.Recent) + let + run :: forall a. Run (GITHUB + RESOURCE_ENV + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff (Either String a) + run action = action # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) - -- Caching & logging - # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) - # Cache.interpret API._compilerCache (Cache.handleFs cache) - # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) - # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) - # Log.interpret (Log.handleTerminal Verbose) - -- Environments + # Except.runExcept # Env.runResourceEnv env.resourceEnv # Env.runGitHubEventEnv { username: env.username, issue: env.issue } # Env.runPacchettiBottiEnv { publicKey: env.publicKey, privateKey: env.privateKey } - -- Base effects + # Log.interpret (Log.handleTerminal env.logVerbosity) # Run.runBaseAff' - liftEffect (Ref.read thrownRef) >>= case _ of - true -> - liftEffect $ Process.exit' 1 - _ -> do - -- After the run, close the issue. If an exception was thrown then the issue will remain open. 
- _ <- Octokit.request env.octokit (Octokit.closeIssueRequest { address: Constants.registry, issue: env.issue }) - pure unit + run do + -- Determine endpoint and prepare the JSON payload + { endpoint, jsonBody } <- case env.operation of + Left (PackageSetUpdate payload) -> pure + { endpoint: "/v1/package-sets" + , jsonBody: JSON.print $ CJ.encode Operation.packageSetUpdateCodec payload + } + + Right (Publish payload) -> pure + { endpoint: "/v1/publish" + , jsonBody: JSON.print $ CJ.encode Operation.publishCodec payload + } + + Right (Authenticated auth) -> do + -- Re-sign with pacchettibotti if submitter is a trustee + signed <- signPacchettiBottiIfTrustee auth + let + endpoint = case signed.payload of + Unpublish _ -> "/v1/unpublish" + Transfer _ -> "/v1/transfer" + pure { endpoint, jsonBody: JSON.print $ CJ.encode Operation.authenticatedCodec signed } + + -- Submit to the registry API + let registryApiUrl = env.resourceEnv.registryApiUrl + Log.debug $ "Submitting to " <> registryApiUrl <> endpoint + submitResult <- Run.liftAff $ submitJob (registryApiUrl <> endpoint) jsonBody + case submitResult of + Left err -> Except.throw $ "Failed to submit job: " <> err + Right { jobId } -> do + let jobIdStr = unwrap jobId + Log.debug $ "Job created: " <> jobIdStr + + -- Post initial comment with job ID + Run.liftAff $ void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "Job started: `" <> jobIdStr <> "`\nLogs: " <> registryApiUrl <> "/v1/jobs/" <> jobIdStr + } + + -- Poll for completion, posting logs as comments + pollAndReport env.octokit env.issue env.pollConfig registryApiUrl jobId + +-- | Submit a job to the registry API +submitJob :: String -> String -> Aff (Either String V1.JobCreatedResponse) +submitJob url body = do + result <- Aff.attempt $ Fetch.fetch url + { method: POST + , headers: { "Content-Type": "application/json" } + , body + } + case result of + Left err -> pure $ Left $ "Network 
error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCreatedResponseCodec json) of + Left err -> pure $ Left $ "Failed to parse response: " <> err + Right r -> pure $ Right r + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Poll a job until it completes, posting logs as GitHub comments. +-- | Returns true if the job succeeded, false otherwise. +pollAndReport + :: forall r + . Octokit + -> IssueNumber + -> PollConfig + -> URL + -> V1.JobId + -> Run (LOG + EXCEPT String + AFF + r) Boolean +pollAndReport octokit issue pollConfig registryApiUrl jobId = go Nothing 0 0 + where + maxConsecutiveErrors :: Int + maxConsecutiveErrors = 5 + + go :: Maybe DateTime -> Int -> Int -> Run (LOG + EXCEPT String + AFF + r) Boolean + go lastTimestamp attempt consecutiveErrors + | attempt >= pollConfig.maxAttempts = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "⏱️ Job timed out" + } + pure false + | consecutiveErrors >= maxConsecutiveErrors = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "❌ Failed to poll job status after " <> show maxConsecutiveErrors <> " consecutive errors" + } + pure false + | otherwise = do + Run.liftAff $ Aff.delay pollConfig.interval + result <- Run.liftAff $ fetchJob registryApiUrl jobId lastTimestamp + case result of + Left err -> do + Log.error $ "Error polling job: " <> err + go lastTimestamp (attempt + 1) (consecutiveErrors + 1) + Right job -> do + let info = V1.jobInfo job + + -- Post any new logs (filtered to Notice level and above, and after lastTimestamp) + let + newLogs = Array.filter isNewLog info.logs + isNewLog l = l.level >= V1.Notice && case lastTimestamp of + Nothing 
-> true + Just ts -> l.timestamp > ts + unless (Array.null newLogs) do + let + formatLog l = "[" <> V1.printLogLevel l.level <> "] " <> l.message + logText = String.joinWith "\n" $ map formatLog newLogs + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "```\n" <> logText <> "\n```" + } + + -- Check if job is done + case info.finishedAt of + Just _ -> do + let statusMsg = if info.success then "✅ Job completed successfully" else "❌ Job failed" + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: statusMsg + } + -- Close the issue on success, leave open on failure + when info.success do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.closeIssueRequest + { address: Constants.registry + , issue + } + pure info.success + Nothing -> do + -- Continue polling with updated timestamp, reset consecutive errors on success + let newTimestamp = Array.last newLogs <#> _.timestamp + go (newTimestamp <|> lastTimestamp) (attempt + 1) 0 + +-- | Fetch job status from the API +fetchJob :: String -> V1.JobId -> Maybe DateTime -> Aff (Either String V1.Job) +fetchJob registryApiUrl (V1.JobId jobId) since = do + let + baseUrl = registryApiUrl <> "/v1/jobs/" <> jobId + url = case since of + Nothing -> baseUrl <> "?level=NOTICE" + Just ts -> baseUrl <> "?level=NOTICE&since=" <> DateTime.format Internal.Format.iso8601DateTime ts + result <- Aff.attempt $ Fetch.fetch url { method: GET } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status == 200 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCodec json) of + Left err -> pure $ Left $ "Failed to parse job: " <> err + Right job -> pure $ Right job + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | 
Configuration for polling job status +type PollConfig = + { maxAttempts :: Int + , interval :: Aff.Milliseconds + } + +-- | Default poll config: 30 minutes at 5 second intervals +defaultPollConfig :: PollConfig +defaultPollConfig = + { maxAttempts: 360 + , interval: Aff.Milliseconds 5000.0 + } type GitHubEventEnv = { octokit :: Octokit @@ -127,10 +261,11 @@ type GitHubEventEnv = , issue :: IssueNumber , username :: String , operation :: Either PackageSetOperation PackageOperation - , spacesConfig :: SpaceKey , publicKey :: String , privateKey :: String , resourceEnv :: Env.ResourceEnv + , pollConfig :: PollConfig + , logVerbosity :: LogVerbosity } initializeGitHub :: Aff (Maybe GitHubEventEnv) @@ -138,17 +273,12 @@ initializeGitHub = do token <- Env.lookupRequired Env.pacchettibottiToken publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret resourceEnv <- Env.lookupResourceEnv eventPath <- Env.lookupRequired Env.githubEventPath octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl readOperation eventPath >>= case _ of - -- If the issue body is not just a JSON string, then we don't consider it - -- to be an attempted operation and it is presumably just an issue on the - -- registry repository. NotJson -> pure Nothing @@ -173,10 +303,11 @@ initializeGitHub = do , issue , username , operation - , spacesConfig: { key: spacesKey, secret: spacesSecret } , publicKey , privateKey , resourceEnv + , pollConfig: defaultPollConfig + , logVerbosity: Verbose } data OperationDecoding @@ -199,9 +330,6 @@ readOperation eventPath = do pure event let - -- TODO: Right now we parse all operations from GitHub issues, but we should - -- in the future only parse out package set operations. The others should be - -- handled via a HTTP API. 
decodeOperation :: JSON -> Either CJ.DecodeError (Either PackageSetOperation PackageOperation) decodeOperation json = do object <- CJ.decode CJ.jobject json @@ -241,7 +369,7 @@ firstObject input = fromMaybe input do after <- String.lastIndexOf (String.Pattern "}") start pure (String.take (after + 1) start) --- | An event triggered by a GitHub workflow, specifically via an issue comment +-- | An event triggered by a GitHub workflow, specifically via an issue comment -- | or issue creation. -- | https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment newtype IssueEvent = IssueEvent diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs new file mode 100644 index 000000000..e638cc684 --- /dev/null +++ b/app/src/App/Main.purs @@ -0,0 +1,90 @@ +module Registry.App.Main where + +import Registry.App.Prelude hiding ((/)) + +import Data.DateTime (diff) +import Data.Time.Duration (Milliseconds(..), Seconds(..)) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Fetch.Retry as Fetch.Retry +import Node.Process as Process +import Registry.App.Server.Env (ServerEnv, createServerEnv) +import Registry.App.Server.JobExecutor as JobExecutor +import Registry.App.Server.Router as Router + +main :: Effect Unit +main = do + createServerEnv # Aff.runAff_ case _ of + Left error -> do + Console.log $ "Failed to start server: " <> Aff.message error + Process.exit' 1 + Right env -> do + case env.vars.resourceEnv.healthchecksUrl of + Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" + Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + Aff.launchAff_ $ jobExecutor env + Router.runRouter env + where + 
healthcheck :: String -> Aff Unit + healthcheck healthchecksUrl = loop limit + where + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + + loop n = do + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + Aff.delay fiveMinutes + loop n + + Cancelled | n >= 0 -> do + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) + + Failed error | n >= 0 -> do + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) + + Succeeded { status } | status /= 200, n >= 0 -> do + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) + + Cancelled -> do + Console.error + "Healthchecks cancelled and failure limit reached, will not retry." + + Failed error -> do + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + + Succeeded _ -> do + Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." + + jobExecutor :: ServerEnv -> Aff Unit + jobExecutor env = do + loop initialRestartDelay + where + initialRestartDelay = Milliseconds 100.0 + + loop restartDelay = do + start <- nowUTC + result <- JobExecutor.runJobExecutor env + end <- nowUTC + + Console.error case result of + Left error -> "Job executor failed: " <> Aff.message error + Right _ -> "Job executor exited for no reason." + + -- This is a heuristic: if the executor keeps crashing immediately, we + -- restart with an exponentially increasing delay, but once the executor + -- had a run longer than a minute, we start over with a small delay. 
+ let + nextRestartDelay + | end `diff` start > Seconds 60.0 = initialRestartDelay + | otherwise = restartDelay <> restartDelay + + Aff.delay nextRestartDelay + loop nextRestartDelay diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs index 7a046414d..5e586ebae 100644 --- a/app/src/App/Prelude.purs +++ b/app/src/App/Prelude.purs @@ -60,7 +60,7 @@ import Data.List (List) as Extra import Data.Map (Map) as Extra import Data.Map as Map import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe -import Data.Newtype (class Newtype, un) as Extra +import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra import Data.Newtype as Newtype import Data.Nullable (Nullable, toMaybe, toNullable) as Extra import Data.Set (Set) as Extra diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 8158695fc..8b0a1765e 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -1,5 +1,13 @@ import Database from "better-sqlite3"; +const JOB_INFO_TABLE = 'job_info' +const LOGS_TABLE = 'logs' +const PUBLISH_JOBS_TABLE = 'publish_jobs'; +const UNPUBLISH_JOBS_TABLE = 'unpublish_jobs'; +const TRANSFER_JOBS_TABLE = 'transfer_jobs'; +const MATRIX_JOBS_TABLE = 'matrix_jobs'; +const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs'; + export const connectImpl = (path, logger) => { logger("Connecting to database at " + path); let db = new Database(path, { @@ -11,49 +19,166 @@ export const connectImpl = (path, logger) => { return db; }; -export const insertLogImpl = (db, logLine) => { - db.prepare( - "INSERT INTO logs (jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)" - ).run(logLine); +export const selectJobInfoImpl = (db, jobId) => { + const stmt = db.prepare(` + SELECT * FROM ${JOB_INFO_TABLE} + WHERE jobId = ? 
LIMIT 1 + `); + return stmt.get(jobId); +} + +// A generic helper function for inserting a new package, matrix, or package set +// job Not exported because this should always be done as part of a more general +// job insertion. A job is expected to always include a 'jobId' and 'createdAt' +// field, though other fields will be required depending on the job. +const _insertJob = (db, table, columns, job) => { + const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns])); + const missingFields = requiredFields.filter(field => !(field in job)); + const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field)); + + if (missingFields.length > 0) { + throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`); + } + + if (extraFields.length > 0) { + throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`); + } + + const insertInfo = db.prepare(` + INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success) + VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success) + `); + + const insertJob = db.prepare(` + INSERT INTO ${table} (${columns.join(', ')}) + VALUES (${columns.map(col => `@${col}`).join(', ')}) + `); + + const insert = db.transaction((job) => { + insertInfo.run({ + jobId: job.jobId, + createdAt: job.createdAt, + startedAt: null, + finishedAt: null, + success: 0 + }); + insertJob.run(job); + }); + + return insert(job); +}; + +export const insertPublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, PUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertUnpublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, UNPUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertTransferJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'payload'] + return _insertJob(db, 
TRANSFER_JOBS_TABLE, columns, job); }; -export const selectLogsByJobImpl = (db, jobId, logLevel) => { - const row = db - .prepare( - "SELECT * FROM logs WHERE jobId = ? AND level >= ? ORDER BY timestamp ASC" - ) - .all(jobId, logLevel); - return row; +export const insertMatrixJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload'] + return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); }; -export const createJobImpl = (db, job) => { - db.prepare( - "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)" - ).run(job); +export const insertPackageSetJobImpl = (db, job) => { + const columns = ['jobId', 'payload'] + return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); }; -export const finishJobImpl = (db, result) => { - db.prepare( - "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId" - ).run(result); +const _selectJob = (db, { table, jobId }) => { + const params = []; + let query = ` + SELECT job.*, info.* + FROM ${table} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + `; + + if (jobId === null) { + query += ` WHERE info.finishedAt IS NULL AND info.startedAt IS NULL`; + } else { + query += ` WHERE info.jobId = ?`; + params.push(jobId); + } + + query += ` ORDER BY info.createdAt ASC LIMIT 1`; + const stmt = db.prepare(query); + + return stmt.get(...params); +} + +export const selectPublishJobImpl = (db, jobId) => { + return _selectJob(db, { table: PUBLISH_JOBS_TABLE, jobId }); +}; + +export const selectUnpublishJobImpl = (db, jobId) => { + return _selectJob(db, { table: UNPUBLISH_JOBS_TABLE, jobId }); +}; + +export const selectTransferJobImpl = (db, jobId) => { + return _selectJob(db, { table: TRANSFER_JOBS_TABLE, jobId }); }; -export const selectJobImpl = (db, jobId) => { - const row = db - .prepare("SELECT * FROM jobs WHERE jobId = ? 
LIMIT 1") - .get(jobId); - return row; +export const selectMatrixJobImpl = (db, jobId) => { + return _selectJob(db, { table: MATRIX_JOBS_TABLE, jobId }); }; -export const runningJobForPackageImpl = (db, packageName) => { - const row = db - .prepare( - "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ? ORDER BY createdAt ASC LIMIT 1" - ) - .get(packageName); - return row; +export const selectPackageSetJobImpl = (db, jobId) => { + return _selectJob(db, { table: PACKAGE_SET_JOBS_TABLE, jobId }); }; -export const deleteIncompleteJobsImpl = (db) => { - db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run(); +export const startJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = @startedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +export const finishJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET success = @success, finishedAt = @finishedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +// TODO I think we should keep track of this somehow. So either we save +// how many times this is being retried and give up at some point, notifying +// the trustees, or we notify right away for any retry so we can look at them +export const resetIncompleteJobsImpl = (db) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = NULL + WHERE finishedAt IS NULL + AND startedAt IS NOT NULL`); + return stmt.run(); +}; + +export const insertLogLineImpl = (db, logLine) => { + const stmt = db.prepare(` + INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp) + VALUES (@jobId, @level, @message, @timestamp) + `); + return stmt.run(logLine); +}; + +export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { + let query = ` + SELECT * FROM ${LOGS_TABLE} + WHERE jobId = ? AND level >= ? AND timestamp >= ? 
+ ORDER BY timestamp ASC LIMIT 100 + `; + + const stmt = db.prepare(query); + return stmt.all(jobId, logLevel, since); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b3683e84e..783b5f756 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -1,184 +1,712 @@ +-- | Bindings for the specific SQL queries we emit to the SQLite database. Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob + ( ConnectOptions + , FinishJob + , InsertMatrixJob + , InsertPackageSetJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , JobInfo + , MatrixJobDetails + , PackageSetJobDetails + , PublishJobDetails , SQLite + , SelectJobRequest + , StartJob + , TransferJobDetails + , UnpublishJobDetails , connect - , createJob - , deleteIncompleteJobs , finishJob - , insertLog - , runningJobForPackage + , insertLogLine + , insertMatrixJob + , insertPackageSetJob + , insertPublishJob + , insertTransferJob + , insertUnpublishJob + , resetIncompleteJobs , selectJob , selectLogsByJob + , selectNextMatrixJob + , selectNextPackageSetJob + , selectNextPublishJob + , selectNextTransferJob + , selectNextUnpublishJob + , startJob ) where import Registry.App.Prelude +import Codec.JSON.DecodeError as JSON.DecodeError +import Control.Monad.Except (runExceptT) import Data.Array as Array import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Nullable as Nullable +import Data.String as String +import Data.UUID.Random as UUID +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Record as 
Record +import Registry.API.V1 (Job(..), JobId(..), LogLevel(..), LogLine) import Registry.API.V1 as API.V1 +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData, TransferData, UnpublishData) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.SSH (Signature) +import Registry.Version as Version +-- | An active database connection acquired with `connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-------------------------------------------------------------------------------- +-- job_info table -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. 
+type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +-- jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +-- jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do +-- created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt +-- started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) +-- finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) +-- isSuccess <- toSuccess success +-- pure +-- { jobId: JobId jobId +-- , createdAt: created +-- , startedAt: started +-- , finishedAt: finished +-- , success: isSuccess +-- } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +-- selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +-- selectJobInfo db (JobId jobId) = do +-- maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId +-- pure $ traverse jobInfoFromJSRep maybeJobInfo + +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSStartJob = + { jobId :: String + , startedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -connect :: 
ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +finishJobToJSRep :: FinishJob -> JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: fromSuccess success + , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit + +foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +resetIncompleteJobs :: SQLite -> Effect Unit +resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl -selectLogsByJob 
:: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +newJobId :: forall m. MonadEffect m => m JobId +newJobId = do + id <- UUID.make + pure $ JobId $ UUID.toString id -type NewJob = +fromSuccess :: Boolean -> Int +fromSuccess success = if success then 1 else 0 + +toSuccess :: Int -> Either String Boolean +toSuccess success = case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + +type SelectJobRequest = + { level :: Maybe LogLevel + , since :: DateTime + , jobId :: JobId + } + +selectJob :: SQLite -> SelectJobRequest -> Effect (Either String (Maybe Job)) +selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do + let logLevel = fromMaybe Error maybeLogLevel + { fail, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since + case fail of + [] -> runExceptT $ firstJust + [ selectPublishJob logs + , selectMatrixJob logs + , selectTransferJob logs + , selectPackageSetJob logs + , selectUnpublishJob logs + ] + _ -> pure $ Left $ "Some logs are not readable: " <> String.joinWith "\n" fail + where + firstJust :: Array (ExceptT String Effect (Maybe Job)) -> ExceptT String Effect (Maybe Job) + firstJust = Array.foldl go (pure Nothing) + where + go acc next = acc >>= case _ of + Just job -> pure (Just job) + Nothing -> next + + selectPublishJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (PublishJob <<< Record.merge { logs, jobType: Proxy :: _ "publish" }) + <<< publishJobDetailsFromJSRep + ) + maybeJobDetails + + selectUnpublishJob logs = 
ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (UnpublishJob <<< Record.merge { logs, jobType: Proxy :: _ "unpublish" }) + <<< unpublishJobDetailsFromJSRep + ) + maybeJobDetails + + selectTransferJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (TransferJob <<< Record.merge { logs, jobType: Proxy :: _ "transfer" }) + <<< transferJobDetailsFromJSRep + ) + maybeJobDetails + + selectMatrixJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (MatrixJob <<< Record.merge { logs, jobType: Proxy :: _ "matrix" }) + <<< matrixJobDetailsFromJSRep + ) + maybeJobDetails + + selectPackageSetJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (PackageSetJob <<< Record.merge { logs, jobType: Proxy :: _ "packageset" }) + <<< packageSetJobDetailsFromJSRep + ) + maybeJobDetails + +-------------------------------------------------------------------------------- +-- publish_jobs table + +type PublishJobDetails = { jobId :: JobId - , jobType :: JobType , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PublishData } -type JSNewJob = +type JSPublishJobDetails = { jobId :: String - , jobType :: String , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId - , 
jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt - , packageName: PackageName.print packageName - , ref +publishJobDetailsFromJSRep :: JSPublishJobDetails -> Either String PublishJobDetails +publishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.publishCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +foreign import selectPublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPublishJobDetails) + +selectNextPublishJob :: SQLite -> Effect (Either String (Maybe PublishJobDetails)) +selectNextPublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db Nullable.null + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +type InsertPublishJob = + { payload :: PublishData + } + +type JSInsertPublishJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , payload :: String + , createdAt :: String } -type JobResult = +insertPublishJobToJSRep :: JobId -> DateTime -> InsertPublishJob -> JSInsertPublishJob +insertPublishJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.publishCodec payload + , createdAt: DateTime.format 
Internal.Format.iso8601DateTime now + } + +foreign import insertPublishJobImpl :: EffectFn2 SQLite JSInsertPublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. +insertPublishJob :: SQLite -> InsertPublishJob -> Effect JobId +insertPublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPublishJobImpl db $ insertPublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- unpublish_jobs table + +type UnpublishJobDetails = { jobId :: JobId - , finishedAt :: DateTime + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData } -type JSJobResult = +type JSUnpublishJobDetails = { jobId :: String - , finishedAt :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String , success :: Int + , packageName :: String + , packageVersion :: String + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +unpublishJobDetailsFromJSRep :: JSUnpublishJobDetails -> Either String UnpublishJobDetails +unpublishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson 
Operation.authenticatedCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +foreign import selectUnpublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSUnpublishJobDetails) + +selectNextUnpublishJob :: SQLite -> Effect (Either String (Maybe UnpublishJobDetails)) +selectNextUnpublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db Nullable.null + pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails + +type InsertUnpublishJob = + { payload :: UnpublishData + , rawPayload :: String + , signature :: Signature } -type Job = +type JSInsertUnpublishJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , payload :: String + , createdAt :: String + } + +insertUnpublishJobToJSRep :: JobId -> DateTime -> InsertUnpublishJob -> JSInsertUnpublishJob +insertUnpublishJobToJSRep jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Unpublish payload + , rawPayload + , signature + } + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + +foreign import insertUnpublishJobImpl :: EffectFn2 SQLite JSInsertUnpublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
+insertUnpublishJob :: SQLite -> InsertUnpublishJob -> Effect JobId +insertUnpublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertUnpublishJobImpl db $ insertUnpublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- transfer_jobs table + +type TransferJobDetails = { jobId :: JobId - , jobType :: JobType + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName - , ref :: String + , payload :: AuthenticatedData + } + +type JSTransferJobDetails = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , packageName :: String + , payload :: String + } + +transferJobDetailsFromJSRep :: JSTransferJobDetails -> Either String TransferJobDetails +transferJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , payload: parsed + } + +foreign import selectTransferJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSTransferJobDetails) + +selectNextTransferJob :: SQLite -> Effect (Either String (Maybe TransferJobDetails)) +selectNextTransferJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db Nullable.null + pure $ traverse transferJobDetailsFromJSRep maybeJobDetails + 
+type InsertTransferJob = + { payload :: TransferData + , rawPayload :: String + , signature :: Signature + } + +type JSInsertTransferJob = + { jobId :: String + , packageName :: String + , payload :: String + , createdAt :: String + } + +insertTransferJobToJSRep :: JobId -> DateTime -> InsertTransferJob -> JSInsertTransferJob +insertTransferJobToJSRep jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Transfer payload, rawPayload, signature } + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + +foreign import insertTransferJobImpl :: EffectFn2 SQLite JSInsertTransferJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. +insertTransferJob :: SQLite -> InsertTransferJob -> Effect JobId +insertTransferJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertTransferJobImpl db $ insertTransferJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- matrix_jobs table + +type InsertMatrixJob = + { packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + } + +type JSInsertMatrixJob = + { jobId :: String + , createdAt :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +insertMatrixJobToJSRep :: JobId -> DateTime -> InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep jobId now { packageName, packageVersion, compilerVersion, payload } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap 
Version.codec) payload + } + +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect JobId +insertMatrixJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep jobId now job + pure jobId + +type MatrixJobDetails = + { jobId :: JobId , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version } -type JSJob = +type JSMatrixJobDetails = { jobId :: String - , jobType :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String - , ref :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + } + +foreign import selectMatrixJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable 
JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db Nullable.null + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + +-------------------------------------------------------------------------------- +-- package_set_jobs table + +type PackageSetJobDetails = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , payload :: PackageSetOperation + } + +type JSPackageSetJobDetails = + { jobId :: String , createdAt :: String + , startedAt :: Nullable String , finishedAt :: Nullable String , success :: Int + , payload :: String + } + +packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , payload: parsed + } + +foreign import selectPackageSetJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPackageSetJobDetails) + +selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) +selectNextPackageSetJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db Nullable.null + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails + +type InsertPackageSetJob = + { payload :: PackageSetOperation + } + +type 
JSInsertPackageSetJob = + { jobId :: String + , createdAt :: String + , payload :: String + } + +insertPackageSetJobToJSRep :: JobId -> DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , payload: stringifyJson Operation.packageSetOperationCodec payload + } + +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit + +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect JobId +insertPackageSetJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- logs table + +type JSLogLine = + { level :: Int + , message :: String + , jobId :: String + , timestamp :: String + } + +logLineToJSRep :: LogLine -> JSLogLine +logLineToJSRep { level, message, jobId, timestamp } = + { level: API.V1.logLevelToPriority level + , message + , jobId: un JobId jobId + , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp } -jsJobToJob :: JSJob -> Either String Job -jsJobToJob raw = do - let jobId = JobId raw.jobId - jobType <- API.V1.parseJobType raw.jobType - packageName <- PackageName.parse raw.packageName - createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt - finishedAt <- case toMaybe raw.finishedAt of - Nothing -> pure Nothing - Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt - success <- case raw.success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show raw.success - pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, ref: raw.ref } - -createJob :: SQLite -> NewJob -> Effect Unit -createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob - 
-finishJob :: SQLite -> JobResult -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult - -selectJob :: SQLite -> JobId -> Effect (Either String Job) -selectJob db (JobId jobId) = do - maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId - pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob - -runningJobForPackage :: SQLite -> PackageName -> Effect (Either String Job) -runningJobForPackage db packageName = do - let pkgStr = PackageName.print packageName - maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr - pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob - -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +logLineFromJSRep :: JSLogLine -> Either String LogLine +logLineFromJSRep { level, message, jobId, timestamp } = do + logLevel <- API.V1.logLevelFromPriority level + time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp + pure + { level: logLevel + , message + , jobId: JobId jobId + , timestamp: time + } + +foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit + +insertLogLine :: SQLite -> LogLine -> Effect Unit +insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep + +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int String (Array JSLogLine) + +-- | Select all logs for a given job at or above the indicated log level. To get all +-- | logs, pass the DEBUG log level. 
+selectLogsByJob :: SQLite -> JobId -> LogLevel -> DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = DateTime.format Internal.Format.iso8601DateTime since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + timestamp + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index b9aa35b1c..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,343 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Data.Codec.JSON as CJ -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Newtype (unwrap) -import Data.String as String -import Data.UUID.Random as UUID -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API (COMPILER_CACHE, _compilerCache) -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv, serverPort) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import 
Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) -import Registry.App.SQLite as SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except - -newJobId :: forall m. MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish Nothing publish - - Unpublish, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Unpublish { name, version } -> do - forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected unpublish operation." 
- - Transfer, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Transfer { name } -> do - forkPipelineJob name "" TransferJob \jobId -> do - Log.info $ "Received Transfer request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected transfer operation." - - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, 
packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String - , resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".env" - token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead 
- -- of 'mempty'. Turned off by default because this is so verbose. - -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - -- Start healthcheck ping loop if URL is configured - case env.vars.resourceEnv.healthchecksUrl of - Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." 
- Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - pure unit - - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional serverPort - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env router - } - pure unit - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. 
ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret (Source.handle Source.Recent) - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs new file mode 100644 index 
000000000..335764eef --- /dev/null +++ b/app/src/App/Server/Env.purs @@ -0,0 +1,185 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , 
resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + + octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we clean out all the jobs that are not completed, + -- because they are stale runs from previous startups of the server. + -- We can just remove the jobs, and all the logs belonging to them will be + -- removed automatically by the foreign key constraint. 
+ liftEffect $ SQLite.resetIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. 
ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs new file mode 100644 index 000000000..2ede4307a --- /dev/null 
+++ b/app/src/App/Server/JobExecutor.purs @@ -0,0 +1,181 @@ +module Registry.App.Server.JobExecutor + ( runJobExecutor + ) where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) +import Control.Parallel as Parallel +import Data.Array as Array +import Data.DateTime (DateTime) +import Data.Map as Map +import Data.Set as Set +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Record as Record +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.App.API as API +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.ManifestIndex as ManifestIndex +import Registry.PackageName as PackageName +import Registry.Version as Version +import Run (Run) +import Run.Except (EXCEPT) + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = runEffects env do + Log.info "Starting Job Executor" + -- Before starting the executor we check if we need to run a whole-registry + -- compiler update: whenever a new compiler is published we need to see which + -- packages are compatible with it; this is a responsibility of the MatrixBuilder, + -- but it needs to be triggered to know there's a new version out. + -- To do that, we ask PursVersions what the compilers are, then we look for + -- the compatibility list of the latest `prelude` version. 
If the new compiler + -- is missing, then we know that we have not attempted to check compatibility + -- with it (since the latest `prelude` has to be compatible by definition), + -- and we can enqueue a "compile everything" here, which will be the first + -- thing that the JobExecutor picks up + void $ MatrixBuilder.checkIfNewCompiler + >>= traverse upgradeRegistryToNewCompiler + Db.resetIncompleteJobs + loop + where + loop = do + maybeJob <- findNextAvailableJob + case maybeJob of + Nothing -> do + liftAff $ Aff.delay (Milliseconds 1000.0) + loop + + Just job -> do + now <- nowUTC + let + jobId = (V1.jobInfo job).jobId + + Db.startJob { jobId, startedAt: now } + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. + -- Note: we set env.jobId so that logs are written to the database. + jobResult <- liftAff do + let envWithJobId = env { jobId = Just jobId } + let execute = Just <$> (runEffects envWithJobId $ executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + success <- case jobResult of + Nothing -> do + Log.error $ "Job " <> unwrap jobId <> " timed out." + pure false + + Just (Left err) -> do + Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err + pure false + + Just (Right _) -> do + Log.info $ "Job " <> unwrap jobId <> " succeeded." + pure true + + finishedAt <- nowUTC + Db.finishJob { jobId, finishedAt, success } + loop + +-- TODO: here we only get a single package for each operation, but really we should +-- have all of them and toposort them. There is something in ManifestIndex but not +-- sure that's what we need +findNextAvailableJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe Job) +findNextAvailableJob = runMaybeT + $ (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" } <$> MaybeT Db.selectNextPublishJob) + <|> (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" } <$> MaybeT Db.selectNextUnpublishJob) + <|> (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" } <$> MaybeT Db.selectNextTransferJob) + <|> (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" } <$> MaybeT Db.selectNextMatrixJob) + <|> (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" } <$> MaybeT Db.selectNextPackageSetJob) + +executeJob :: DateTime -> Job -> Run ServerEffects Unit +executeJob _ = case _ of + PublishJob { payload: payload@{ name } } -> do + maybeResult <- API.publish Nothing payload + -- The above operation will throw if not successful, and return a map of + -- dependencies of the package only if it has not been published before. + for_ maybeResult \{ dependencies, version } -> do + -- At this point this package has been verified with one compiler only. 
+ -- So we need to enqueue compilation jobs for (1) same package, all the other + -- compilers, and (2) same compiler, all packages that depend on this one + -- TODO here we are building the compiler index, but we should really cache it + compilerIndex <- MatrixBuilder.readCompilerIndex + let solverData = { compiler: payload.compiler, name, version, dependencies, compilerIndex } + samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData + sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData + for (Array.fromFoldable $ Set.union samePackageAllCompilers sameCompilerAllDependants) + \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do + Log.info $ "Enqueuing matrix job: compiler " + <> Version.print solvedCompiler + <> ", package " + <> PackageName.print solvedPackage + <> "@" + <> Version.print solvedVersion + Db.insertMatrixJob + { payload: resolutions + , compilerVersion: solvedCompiler + , packageName: solvedPackage + , packageVersion: solvedVersion + } + UnpublishJob { payload } -> API.authenticated payload + TransferJob { payload } -> API.authenticated payload + MatrixJob details@{ packageName, packageVersion } -> do + maybeDependencies <- MatrixBuilder.runMatrixJob details + -- Unlike the publishing case, after verifying a compilation here we only need + -- to followup with trying to compile the packages that depend on this one + for_ maybeDependencies \dependencies -> do + -- TODO here we are building the compiler index, but we should really cache it + compilerIndex <- MatrixBuilder.readCompilerIndex + let solverData = { compiler: details.compilerVersion, name: packageName, version: packageVersion, dependencies, compilerIndex } + sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData + for (Array.fromFoldable sameCompilerAllDependants) + \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do + Log.info $ 
"Enqueuing matrix job: compiler " + <> Version.print solvedCompiler + <> ", package " + <> PackageName.print solvedPackage + <> "@" + <> Version.print solvedVersion + Db.insertMatrixJob + { payload: resolutions + , compilerVersion: solvedCompiler + , packageName: solvedPackage + , packageVersion: solvedVersion + } + PackageSetJob _details -> + -- TODO: need to pass in the package_sets effect + -- API.packageSetUpdate2 details + pure unit + +upgradeRegistryToNewCompiler :: forall r. Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit +upgradeRegistryToNewCompiler newCompilerVersion = do + allManifests <- Registry.readAllManifests + for_ (ManifestIndex.toArray allManifests) \(Manifest manifest) -> do + -- Note: we enqueue compilation jobs only for packages with no dependencies, + -- because from them we should be able to reach the whole of the registry, + -- as they complete new jobs for their dependants will be queued up. + when (not (Map.isEmpty manifest.dependencies)) do + Log.info $ "Enqueuing matrix job for _new_ compiler " + <> Version.print newCompilerVersion + <> ", package " + <> PackageName.print manifest.name + <> "@" + <> Version.print manifest.version + void $ Db.insertMatrixJob + { payload: Map.empty + , compilerVersion: newCompilerVersion + , packageName: manifest.name + , packageVersion: manifest.version + } diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs new file mode 100644 index 000000000..8db8e883b --- /dev/null +++ b/app/src/App/Server/MatrixBuilder.purs @@ -0,0 +1,230 @@ +module Registry.App.Server.MatrixBuilder + ( checkIfNewCompiler + , installBuildPlan + , printCompilerFailure + , readCompilerIndex + , runMatrixJob + , solveForAllCompilers + , solveDependantsForCompiler + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray +import Data.Map as Map +import Data.Set as Set +import Data.Set.NonEmpty as NonEmptySet +import Data.String 
as String +import Effect.Aff as Aff +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Registry.API.V1 (MatrixJobData) +import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Tmp as Tmp +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata as Metadata +import Registry.PackageName as PackageName +import Registry.Range as Range +import Registry.Solver as Solver +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +runMatrixJob :: forall r. 
MatrixJobData -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range)) +runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do + workdir <- Tmp.mkTmpDir + let installed = Path.concat [ workdir, ".registry" ] + FS.Extra.ensureDirectory installed + installBuildPlan (Map.insert packageName packageVersion buildPlan) installed + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just compilerVersion + , cwd: Just workdir + } + FS.Extra.remove workdir + case result of + Left err -> do + Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion + <> ":\n" + <> printCompilerFailure compilerVersion err + pure Nothing + Right _ -> do + Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion + + Registry.readMetadata packageName >>= case _ of + Nothing -> do + Log.error $ "No existing metadata for " <> PackageName.print packageName + pure Nothing + Just (Metadata metadata) -> do + let + metadataWithCompilers = metadata + { published = Map.update + ( \publishedMetadata@{ compilers } -> + Just $ publishedMetadata { compilers = NonEmptySet.toUnfoldable1 $ NonEmptySet.fromFoldable1 $ NonEmptyArray.cons compilerVersion compilers } + ) + packageVersion + metadata.published + } + Registry.writeMetadata packageName (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Log.info "Wrote completed metadata to the registry!" 
+ Registry.readManifest packageName packageVersion >>= case _ of + Just (Manifest manifest) -> pure (Just manifest.dependencies) + Nothing -> do + Log.error $ "No existing metadata for " <> PackageName.print packageName <> "@" <> Version.print packageVersion + pure Nothing + +-- TODO feels like we should be doing this at startup and use the cache instead +-- of reading files all over again +readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex +readCompilerIndex = do + metadata <- Registry.readAllMetadata + manifests <- Registry.readAllManifests + allCompilers <- PursVersions.pursVersions + pure $ Solver.buildCompilerIndex allCompilers manifests metadata + +-- | Install all dependencies indicated by the build plan to the specified +-- | directory. Packages will be installed at 'dir/package-name-x.y.z'. +installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit +installBuildPlan resolutions dependenciesDir = do + Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir + -- We fetch every dependency at its resolved version, unpack the tarball, and + -- store the resulting source code in a specified directory for dependencies. + forWithIndex_ resolutions \name version -> do + let + -- This filename uses the format the directory name will have once + -- unpacked, ie. package-name-major.minor.patch + filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" + filepath = Path.concat [ dependenciesDir, filename ] + Storage.download name version filepath + Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of + Left error -> do + Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error + Except.throw "Failed to unpack dependency tarball, cannot continue." 
+ Right _ -> + Log.debug $ "Unpacked " <> filename + Run.liftAff $ FS.Aff.unlink filepath + Log.debug $ "Installed " <> formatPackageVersion name version + +printCompilerFailure :: Version -> CompilerFailure -> String +printCompilerFailure compiler = case _ of + MissingCompiler -> Array.fold + [ "Compilation failed because the build plan compiler version " + , Version.print compiler + , " is not supported. Please try again with a different compiler." + ] + CompilationError errs -> String.joinWith "\n" + [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" + , "```" + , Purs.printCompilerErrors errs + , "```" + ] + UnknownError err -> String.joinWith "\n" + [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" + , "```" + , err + , "```" + ] + +type MatrixSolverData = + { compilerIndex :: Solver.CompilerIndex + , compiler :: Version + , name :: PackageName + , version :: Version + , dependencies :: Map PackageName Range + } + +type MatrixSolverResult = + { name :: PackageName + , version :: Version + , compiler :: Version + , resolutions :: Map PackageName Version + } + +solveForAllCompilers :: forall r. 
MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Set MatrixSolverResult) +solveForAllCompilers { compilerIndex, name, version, compiler, dependencies } = do + -- remove the compiler we tested with from the set of all of them + compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions + newJobs <- for compilers \target -> do + Log.debug $ "Trying compiler " <> Version.print target <> " for package " <> PackageName.print name + case Solver.solveWithCompiler (Range.exact target) compilerIndex dependencies of + Left _solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print target + -- Log.debug $ Solver.printSolverError solverErrors + pure Nothing + Right (Tuple solvedCompiler resolutions) -> case solvedCompiler == target of + true -> pure $ Just { compiler: target, resolutions, name, version } + false -> do + Log.debug $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print solvedCompiler + , ") that differs from the target compiler (" + , Version.print target + , ")." + ] + pure Nothing + pure $ Set.fromFoldable $ Array.catMaybes newJobs + +solveDependantsForCompiler :: forall r. MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Set MatrixSolverResult) +solveDependantsForCompiler { compilerIndex, name, version, compiler } = do + manifestIndex <- Registry.readAllManifests + let dependentManifests = ManifestIndex.dependants manifestIndex name version + newJobs <- for dependentManifests \(Manifest manifest) -> do + -- we first verify if we have already attempted this package with this compiler, + -- either in the form of having it in the metadata already, or as a failed compilation + -- (i.e. 
if we find compilers in the metadata for this version we only check this one + -- if it's newer, because all the previous ones have been tried) + shouldAttemptToCompile <- Registry.readMetadata manifest.name >>= case _ of + Nothing -> pure false + Just metadata -> pure $ case Map.lookup version (un Metadata metadata).published of + Nothing -> false + Just { compilers } -> any (_ > compiler) compilers + case shouldAttemptToCompile of + false -> pure Nothing + true -> do + -- if all good then run the solver + Log.debug $ "Trying compiler " <> Version.print compiler <> " for package " <> PackageName.print manifest.name + case Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies of + Left _solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print compiler + -- Log.debug $ Solver.printSolverError solverErrors + pure Nothing + Right (Tuple solvedCompiler resolutions) -> case compiler == solvedCompiler of + true -> pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version } + false -> do + Log.debug $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print solvedCompiler + , ") that differs from the target compiler (" + , Version.print compiler + , ")." + ] + pure Nothing + pure $ Set.fromFoldable $ Array.catMaybes newJobs + +checkIfNewCompiler :: forall r. Run (EXCEPT String + LOG + REGISTRY + AFF + r) (Maybe Version) +checkIfNewCompiler = do + Log.info "Checking if there's a new compiler in town..." 
+ latestCompiler <- NonEmptyArray.foldr1 max <$> PursVersions.pursVersions + maybeMetadata <- Registry.readMetadata $ unsafeFromRight $ PackageName.parse "prelude" + pure $ maybeMetadata >>= \(Metadata metadata) -> + Map.findMax metadata.published + >>= \{ key: _version, value: { compilers } } -> do + case all (_ < latestCompiler) compilers of + -- all compilers compatible with the latest prelude are older than this one + true -> Just latestCompiler + false -> Nothing diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs new file mode 100644 index 000000000..9143508de --- /dev/null +++ b/app/src/App/Server/Router.purs @@ -0,0 +1,97 @@ +module Registry.App.Server.Router where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Effect.Aff as Aff +import HTTPurple (Method(..), Request, Response) +import HTTPurple as HTTPurple +import HTTPurple.Status as Status +import Registry.API.V1 (Route(..)) +import Registry.API.V1 as V1 +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env as Env +import Registry.App.Effect.Log as Log +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) +import Registry.Operation as Operation +import Run (Run) +import Run.Except as Run.Except + +runRouter :: ServerEnv -> Effect Unit +runRouter env = do + -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) + port <- liftEffect $ Env.lookupOptional Env.serverPort + void $ HTTPurple.serve + { hostname: "0.0.0.0" + , port + } + { route: V1.routes + , router: runServer + } + where + runServer :: Request Route -> Aff Response + runServer request = do + result <- runEffects env (router request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +router :: Request Route -> Run ServerEffects Response +router { route, method, body } = HTTPurple.usingCont case route, method of + Publish, Post -> do + publish <- 
HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + jobId <- lift $ Db.insertPublishJob { payload: publish } + jsonOk V1.jobCreatedResponseCodec { jobId } + + Unpublish, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Unpublish payload -> do + lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload + jobId <- lift $ Db.insertUnpublishJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + jsonOk V1.jobCreatedResponseCodec { jobId } + _ -> + HTTPurple.badRequest "Expected unpublish operation." + + Transfer, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Transfer payload -> do + lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload + jobId <- lift $ Db.insertTransferJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + jsonOk V1.jobCreatedResponseCodec { jobId } + _ -> + HTTPurple.badRequest "Expected transfer operation." 
+ + -- TODO return jobs + Jobs, Get -> do + _now <- liftEffect nowUTC + jsonOk (CJ.array V1.jobCodec) [] + + Job jobId { level: maybeLogLevel, since }, Get -> do + now <- liftEffect nowUTC + lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe now since }) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching job: " <> err + HTTPurple.notFound + Right Nothing -> do + HTTPurple.notFound + Right (Just job) -> jsonOk V1.jobCodec job + + Status, Get -> + HTTPurple.emptyResponse Status.ok + + Status, Head -> + HTTPurple.emptyResponse Status.ok + + _, _ -> + HTTPurple.notFound diff --git a/app/test/App/API.purs b/app/test/App/API.purs index caaf6c215..122879e49 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -96,12 +96,13 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version: version , resolutions: Nothing } -- First, we publish the package. Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) publishArgs + void $ API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -158,10 +159,11 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + void $ API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs -- We can also verify that transitive dependencies are added for legacy -- packages. 
@@ -172,10 +174,11 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + void $ API.publish (Just (toLegacyIndex idx)) transitivePublishArgs -- We should verify the resulting metadata file is correct Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..d2c6baf18 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Nothing @@ -103,6 +106,7 @@ preludeAdditionString = { "name": "prelude", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" @@ -121,6 
+125,7 @@ packageNameTooLongString = { "name": "packagenamewayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyytoolong", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 42cc7d6ab..8c3e24195 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -30,8 +30,6 @@ import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB, GITHUB_CACHE, GitHub(..)) @@ -89,7 +87,6 @@ type TEST_EFFECTS = + GITHUB_CACHE + LEGACY_CACHE + COMPILER_CACHE - + COMMENT + LOG + EXCEPT String + AFF @@ -129,7 +126,6 @@ runTestEffects env operation = Aff.attempt do # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects - # Comment.interpret Comment.handleLog # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName 
TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..71727f473 --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,74 @@ +-- migrate:up + +-- Common job information table +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +-- Publishing jobs +CREATE TABLE publish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Unpublishing jobs +CREATE TABLE unpublish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package transfer jobs +CREATE TABLE transfer_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Compiler matrix jobs +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package set jobs +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); + +-- migrate:down + +DROP TABLE job_info; +DROP TABLE publish_jobs; +DROP TABLE unpublish_jobs; +DROP TABLE transfer_jobs; +DROP TABLE matrix_jobs; +DROP TABLE package_set_jobs; +DROP TABLE logs; diff --git a/db/schema.sql b/db/schema.sql index 116de1dda..1baf6403f 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -1,21 +1,44 @@ CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key); -CREATE TABLE jobs ( - jobId text primary key not null, - jobType text not null, - packageName text not null, - ref text not null, - createdAt text not null, - finishedAt text, - success integer not null default 0 +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); CREATE TABLE logs ( - id integer primary key autoincrement, - jobId text not null references jobs on delete cascade, - level integer not null, - message text not null, - timestamp text not null + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL ); -- Dbmate schema migrations INSERT INTO "schema_migrations" (version) VALUES ('20230711143615'), - ('20230711143803'); + ('20230711143803'), + ('20240914170550'), + ('20240914171030'); diff --git a/flake.nix b/flake.nix index 56a98f696..9aacbaa80 100644 --- a/flake.nix +++ b/flake.nix @@ -216,10 +216,13 @@ name = "registry-dev"; inherit GIT_LFS_SKIP_SMUDGE; - # Development defaults from .env.example SERVER_PORT = envDefaults.SERVER_PORT; DATABASE_URL = envDefaults.DATABASE_URL; + # NOTE: Test-specific env vars (REGISTRY_API_URL, GITHUB_API_URL, PACCHETTIBOTTI_*) + # are NOT set here to avoid conflicting with .env files used by production scripts + # like legacy-importer. Use `nix run .#test-env` to run E2E tests with mocked services. + packages = with pkgs; registry-runtime-deps diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..fb4bd3b54 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -1,7 +1,33 @@ -module Registry.API.V1 where +module Registry.API.V1 + ( JobCreatedResponse + , JobId(..) + , JobInfo + , JobType(..) + , Job(..) + , LogLevel(..) + , LogLine + , MatrixJobData + , PackageSetJobData + , PublishJobData + , Route(..) 
+ , TransferJobData + , UnpublishJobData + , jobInfo + , jobCodec + , jobCreatedResponseCodec + , logLevelFromPriority + , logLevelToPriority + , printJobType + , printLogLevel + , routes + ) where import Prelude hiding ((/)) +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Alt ((<|>)) +import Control.Monad.Except (Except, except) +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Sum as CJ.Sum @@ -10,17 +36,26 @@ import Data.Either (Either(..), hush) import Data.Formatter.DateTime as DateTime import Data.Generic.Rep (class Generic) import Data.Lens.Iso.Newtype (_Newtype) +import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor +import Data.Symbol (class IsSymbol) +import Data.Symbol as Symbol +import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData) +import Registry.Operation as Operation import Registry.PackageName (PackageName) import Registry.PackageName as PackageName +import Registry.Version (Version) +import Registry.Version as Version import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG import Routing.Duplex.Generic.Syntax ((/), (?)) +import Type.Proxy (Proxy(..)) data Route = Publish @@ -64,29 +99,169 @@ type JobCreatedResponse = { jobId :: JobId } jobCreatedResponseCodec :: CJ.Codec JobCreatedResponse jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { jobId: jobIdCodec } -type Job = +data Job + = PublishJob PublishJobData + | UnpublishJob UnpublishJobData + | TransferJob TransferJobData + | MatrixJob MatrixJobData + | PackageSetJob PackageSetJobData + +type JobInfo r = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , ref :: String , createdAt :: DateTime + 
, startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean , logs :: Array LogLine + | r } +type PublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: PublishData + , jobType :: Proxy "publish" + ) + +type UnpublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData + , jobType :: Proxy "unpublish" + ) + +type TransferJobData = JobInfo + ( packageName :: PackageName + , payload :: AuthenticatedData + , jobType :: Proxy "transfer" + ) + +type MatrixJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , jobType :: Proxy "matrix" + ) + +type PackageSetJobData = JobInfo + ( payload :: PackageSetOperation + , jobType :: Proxy "packageset" + ) + jobCodec :: CJ.Codec Job -jobCodec = CJ.named "Job" $ CJ.Record.object +jobCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError Job + decode json = + do + map PublishJob (Codec.decode publishJobDataCodec json) + <|> map UnpublishJob (Codec.decode unpublishJobDataCodec json) + <|> map TransferJob (Codec.decode transferJobDataCodec json) + <|> map MatrixJob (Codec.decode matrixJobDataCodec json) + <|> map PackageSetJob (Codec.decode packageSetJobDataCodec json) + + encode :: Job -> JSON + encode = case _ of + PublishJob j -> CJ.encode publishJobDataCodec j + UnpublishJob j -> CJ.encode unpublishJobDataCodec j + TransferJob j -> CJ.encode transferJobDataCodec j + MatrixJob j -> CJ.encode matrixJobDataCodec j + PackageSetJob j -> CJ.encode packageSetJobDataCodec j + +publishJobDataCodec :: CJ.Codec PublishJobData +publishJobDataCodec = CJ.named "PublishJob" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec + , jobType: symbolCodec (Proxy :: _ "publish") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + 
, finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec , packageName: PackageName.codec - , ref: CJ.string + , packageVersion: Version.codec + , payload: Operation.publishCodec + } + +symbolCodec :: forall sym. IsSymbol sym => Proxy sym -> CJ.Codec (Proxy sym) +symbolCodec _ = Codec.codec' decode encode + where + decode json = except do + symbol <- CJ.decode CJ.string json + let expected = Symbol.reflectSymbol (Proxy :: _ sym) + case symbol == expected of + false -> Left $ CJ.DecodeError.basic + $ "Tried to decode symbol '" <> symbol <> "' as '" <> expected <> "'" + true -> Right (Proxy :: _ sym) + encode = CJ.encode CJ.string <<< Symbol.reflectSymbol + +unpublishJobDataCodec :: CJ.Codec UnpublishJobData +unpublishJobDataCodec = CJ.named "UnpublishJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "unpublish") , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , payload: Operation.authenticatedCodec } +transferJobDataCodec :: CJ.Codec TransferJobData +transferJobDataCodec = CJ.named "TransferJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "transfer") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , payload: Operation.authenticatedCodec + } + +matrixJobDataCodec :: CJ.Codec MatrixJobData +matrixJobDataCodec = CJ.named "MatrixJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "matrix") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: 
CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , compilerVersion: Version.codec + , payload: Internal.Codec.packageMap Version.codec + } + +packageSetJobDataCodec :: CJ.Codec PackageSetJobData +packageSetJobDataCodec = CJ.named "PackageSetJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "packageset") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , payload: Operation.packageSetOperationCodec + } + +jobInfo :: Job -> JobInfo () +jobInfo = case _ of + PublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + UnpublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + TransferJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + MatrixJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + PackageSetJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + newtype JobId = JobId String derive instance Newtype JobId _ @@ -94,25 +269,22 @@ derive instance Newtype JobId _ jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string -data JobType = PublishJob | UnpublishJob | TransferJob +data JobType + = PublishJobType + | UnpublishJobType + | TransferJobType + | MatrixJobType + | PackageSetJobType derive instance Eq JobType -parseJobType :: String -> Either String JobType 
-parseJobType = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right TransferJob - j -> Left $ "Invalid job type " <> show j - printJobType :: JobType -> String printJobType = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -jobTypeCodec :: CJ.Codec JobType -jobTypeCodec = CJ.Sum.enumSum printJobType (hush <<< parseJobType) + PublishJobType -> "publish" + UnpublishJobType -> "unpublish" + TransferJobType -> "transfer" + MatrixJobType -> "matrix" + PackageSetJobType -> "packageset" type LogLine = { level :: LogLevel @@ -129,7 +301,7 @@ logLineCodec = CJ.named "LogLine" $ CJ.Record.object , timestamp: Internal.Codec.iso8601DateTime } -data LogLevel = Debug | Info | Warn | Error +data LogLevel = Debug | Info | Warn | Notice | Error derive instance Eq LogLevel derive instance Ord LogLevel @@ -139,6 +311,7 @@ printLogLevel = case _ of Debug -> "DEBUG" Info -> "INFO" Warn -> "WARN" + Notice -> "NOTICE" Error -> "ERROR" -- These numbers are not consecutive so that we can insert new log levels if need be @@ -147,6 +320,7 @@ logLevelToPriority = case _ of Debug -> 0 Info -> 10 Warn -> 20 + Notice -> 25 Error -> 30 logLevelFromPriority :: Int -> Either String LogLevel @@ -154,6 +328,7 @@ logLevelFromPriority = case _ of 0 -> Right Debug 10 -> Right Info 20 -> Right Warn + 25 -> Right Notice 30 -> Right Error other -> Left $ "Invalid log level priority: " <> show other @@ -162,5 +337,6 @@ parseLogLevel = case _ of "DEBUG" -> Right Debug "INFO" -> Right Info "WARN" -> Right Warn + "NOTICE" -> Right Notice "ERROR" -> Right Error other -> Left $ "Invalid log level: " <> other diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index 4837b49ed..eb3b08480 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -7,11 +7,13 @@ -- | https://github.com/purescript/registry-index module Registry.ManifestIndex ( ManifestIndex + , IncludeRanges(..) 
+ , delete + , dependants , empty , fromSet , insert , insertIntoEntryFile - , delete , lookup , maximalIndex , packageEntryDirectory @@ -20,10 +22,10 @@ module Registry.ManifestIndex , printEntry , readEntryFile , removeFromEntryFile + , toArray , toMap - , toSortedArray , topologicalSort - , IncludeRanges(..) + , toSortedArray , writeEntryFile ) where @@ -87,13 +89,18 @@ empty = ManifestIndex Map.empty toMap :: ManifestIndex -> Map PackageName (Map Version Manifest) toMap (ManifestIndex index) = index --- | Produce an array of manifests topologically sorted by dependencies. -toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest -toSortedArray includeRanges (ManifestIndex index) = topologicalSort includeRanges $ Set.fromFoldable do +-- | Produce an array of all the manifests +toArray :: ManifestIndex -> Array Manifest +toArray (ManifestIndex index) = do Tuple _ versions <- Map.toUnfoldableUnordered index Tuple _ manifest <- Map.toUnfoldableUnordered versions [ manifest ] +-- | Produce an array of all the manifests, topologically sorted by dependencies. +toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest +toSortedArray includeRanges index = + topologicalSort includeRanges $ Set.fromFoldable $ toArray index + -- | Look up a package version's manifest in the manifest index. 
lookup :: PackageName -> Version -> ManifestIndex -> Maybe Manifest lookup name version (ManifestIndex index) = @@ -199,6 +206,13 @@ topologicalSort includeRanges manifests = IgnoreRanges -> versions [ Tuple dependency included ] +dependants :: ManifestIndex -> PackageName -> Version -> Array Manifest +dependants idx packageName version = idx + # toSortedArray ConsiderRanges + # Array.filter \(Manifest { dependencies }) -> case Map.lookup packageName dependencies of + Nothing -> false + Just range -> Range.includes range version + -- | Calculate the directory containing this package in the registry index, -- | using the following format: -- | diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index ddc39b48b..c54bed31e 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -20,20 +20,15 @@ module Registry.Metadata import Prelude -import Control.Alt ((<|>)) -import Control.Monad.Except (Except, except) import Data.Array.NonEmpty (NonEmptyArray) -import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.DateTime (DateTime) -import Data.Either (Either(..)) import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor -import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Location (Location) import Registry.Location as Location diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 98c35f092..262ceb3db 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -14,8 +14,8 @@ -- | are well-formed, and JSON codecs package managers can use to construct the -- | requests necessary to send to the Registry API or publish in a GitHub issue. module Registry.Operation - ( AuthenticatedPackageOperation(..) - , AuthenticatedData + ( AuthenticatedData + , AuthenticatedPackageOperation(..) , PackageOperation(..) , PackageSetOperation(..) 
, PackageSetUpdateData @@ -23,6 +23,9 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageName + , packageOperationCodec + , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec @@ -58,6 +61,25 @@ data PackageOperation derive instance Eq PackageOperation +packageName :: PackageOperation -> PackageName +packageName = case _ of + Publish { name } -> name + Authenticated { payload } -> case payload of + Unpublish { name } -> name + Transfer { name } -> name + +-- | A codec for encoding and decoding a `PackageOperation` as JSON. +packageOperationCodec :: CJ.Codec PackageOperation +packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode + where + decode json = + map Publish (Codec.decode publishCodec json) + <|> map Authenticated (Codec.decode authenticatedCodec json) + + encode = case _ of + Publish publish -> CJ.encode publishCodec publish + Authenticated authenticated -> CJ.encode authenticatedCodec authenticated + -- | An operation supported by the registry HTTP API for package operations and -- | which must be authenticated. data AuthenticatedPackageOperation @@ -74,6 +96,7 @@ type PublishData = { name :: PackageName , location :: Maybe Location , ref :: String + , version :: Version , compiler :: Version , resolutions :: Maybe (Map PackageName Version) } @@ -84,6 +107,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object { name: PackageName.codec , location: CJ.Record.optional Location.codec , ref: CJ.string + , version: Version.codec , compiler: Version.codec , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec) } @@ -178,6 +202,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData derive instance Eq PackageSetOperation +-- | A codec for encoding and decoding a `PackageSetOperation` as JSON. 
+packageSetOperationCodec :: CJ.Codec PackageSetOperation +packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode + where + decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json) + encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update + -- | Submit a batch update to the most recent package set. -- | -- | For full details, see the registry spec: diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index 929894645..d3dcec10c 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -19,6 +19,7 @@ import Data.List.NonEmpty as NEL import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map import Data.Maybe (Maybe(..), fromMaybe, maybe, maybe') +import Data.Maybe as Maybe import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) @@ -81,11 +82,11 @@ buildCompilerIndex pursCompilers index metadata = CompilerIndex do -- | Solve the given dependencies using a dependency index that includes compiler -- | versions, such that the solution prunes results that would fall outside -- | a compiler range accepted by all dependencies. 
-solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version)) +solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple Version (Map PackageName Version)) solveWithCompiler pursRange (CompilerIndex index) required = do let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) } - let pursVersion = Map.lookup purs results + let pursVersion = Maybe.fromMaybe' (\_ -> Partial.unsafeCrashWith "Produced a compiler-derived build plan with no compiler!") $ Map.lookup purs results pure $ Tuple pursVersion $ Map.delete purs results -- | Data from the registry index, listing dependencies for each version of diff --git a/lib/test/Registry/Operation.purs b/lib/test/Registry/Operation.purs index 2ccb4075a..1400e70ee 100644 --- a/lib/test/Registry/Operation.purs +++ b/lib/test/Registry/Operation.purs @@ -54,7 +54,8 @@ minimalPublish = { "compiler": "0.15.6", "name": "my-package", - "ref": "v1.0.0" + "ref": "v1.0.0", + "version": "1.0.0" }""" fullPublish :: String @@ -67,7 +68,8 @@ fullPublish = "subdir": "core" }, "name": "my-package", - "ref": "c23snabhsrib39" + "ref": "c23snabhsrib39", + "version": "1.0.0" }""" unpublish :: String diff --git a/nix/overlay.nix b/nix/overlay.nix index 71049d6e8..7eed2916d 100644 --- a/nix/overlay.nix +++ b/nix/overlay.nix @@ -181,8 +181,9 @@ in ] ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ]; - # To update: run `nix build .#server` and copy the hash from the error - npmDepsHash = "sha256-iWHvXmTcWr4A/VerriuewnH0qNIYBtYkQnqv1VO8Jhs="; + # To update: change to prev.lib.fakeHash, run `nix build .#server`, and copy the + # hash from the error + npmDepsHash = "sha256-AQcHoiM7CcBGFR0ZjOwunuq5oWhpWkTI3QGqeE3ASpI="; installPhase = '' 
mkdir -p $out @@ -235,7 +236,7 @@ in registry-server = prev.callPackage (buildRegistryPackage { name = "registry-server"; - module = "Registry.App.Server"; + module = "Registry.App.Main"; description = "PureScript Registry API server"; src = ../app; spagoLock = app; diff --git a/nix/test/config.nix b/nix/test/config.nix index 66813fe5b..3c06276e5 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -30,6 +30,7 @@ let # Mock service URLs for test environment mockUrls = { + registry = "http://localhost:${toString ports.server}/api"; github = "http://localhost:${toString ports.github}"; s3 = "http://localhost:${toString ports.s3}"; bucket = "http://localhost:${toString ports.bucket}"; @@ -37,17 +38,29 @@ let healthchecks = "http://localhost:${toString ports.healthchecks}"; }; + # Valid ED25519 test keypair for pacchettibotti (used for signing authenticated operations). + # These are test-only keys, not used in production. + testKeys = { + # ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHXE9ia5mQG5dPyS6pirU9PSWFP8hPglwChJERBpMoki pacchettibotti@purescript.org + public = "c3NoLWVkMjU1MTkgQUFBQUMzTnphQzFsWkRJMU5URTVBQUFBSUhYRTlpYTVtUUc1ZFB5UzZwaXJVOVBTV0ZQOGhQZ2x3Q2hKRVJCcE1va2kgcGFjY2hldHRpYm90dGlAcHVyZXNjcmlwdC5vcmcK"; + # OpenSSH format private key + private = "LS0tLS1CRUdJTiBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0KYjNCbGJuTnphQzFyWlhrdGRqRUFBQUFBQkc1dmJtVUFBQUFFYm05dVpRQUFBQUFBQUFBQkFBQUFNd0FBQUF0emMyZ3RaVwpReU5UVXhPUUFBQUNCMXhQWW11WmtCdVhUOGt1cVlxMVBUMGxoVC9JVDRKY0FvU1JFUWFUS0pJZ0FBQUtBMVFMT3NOVUN6CnJBQUFBQXR6YzJndFpXUXlOVFV4T1FBQUFDQjF4UFltdVprQnVYVDhrdXFZcTFQVDBsaFQvSVQ0SmNBb1NSRVFhVEtKSWcKQUFBRUJ1dUErV2NqODlTcjR2RUZnU043ZVF5SGFCWlYvc0F2YVhvVGRKa2lwanlYWEU5aWE1bVFHNWRQeVM2cGlyVTlQUwpXRlA4aFBnbHdDaEpFUkJwTW9raUFBQUFIWEJoWTJOb1pYUjBhV0p2ZEhScFFIQjFjbVZ6WTNKcGNIUXViM0puCi0tLS0tRU5EIE9QRU5TU0ggUFJJVkFURSBLRVktLS0tLQo="; + }; + # Complete test environment - starts with .env.example defaults which include # mock secrets, then overrides external services with mock URLs. 
The DATABASE_URL # and REPO_FIXTURES_DIR vars are derived from STATE_DIR at runtime so those are # implemented in the script directly. testEnv = envDefaults // { # Mock service URLs (override production endpoints) + REGISTRY_API_URL = mockUrls.registry; GITHUB_API_URL = mockUrls.github; S3_API_URL = mockUrls.s3; S3_BUCKET_URL = mockUrls.bucket; PURSUIT_API_URL = mockUrls.pursuit; HEALTHCHECKS_URL = mockUrls.healthchecks; + PACCHETTIBOTTI_ED25519_PUB = testKeys.public; + PACCHETTIBOTTI_ED25519 = testKeys.private; }; envToExports = @@ -153,6 +166,54 @@ let }; }; } + # Accept issue comment creation (used by GitHubIssue workflow) + { + request = { + method = "POST"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+/comments"; + }; + response = { + status = 201; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + body = "ok"; + }; + }; + } + # Accept issue closing (used by GitHubIssue workflow) + { + request = { + method = "PATCH"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+"; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + state = "closed"; + }; + }; + } + # GitHub Teams API for trustee verification (used by GitHubIssue workflow) + { + request = { + method = "GET"; + urlPattern = "/orgs/purescript/teams/packaging/members.*"; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + # Return packaging-team-user as a packaging team member for trustee re-signing tests + jsonBody = [ + { + login = "packaging-team-user"; + id = 1; + } + ]; + }; + } ]; # S3 API wiremock mappings (serves package tarballs) @@ -357,38 +418,39 @@ let ''; # Script to set up git fixtures - setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' - set -e - FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" - - # Remove any existing fixtures (they may have wrong permissions from nix store copy) - rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true - - 
mkdir -p "$FIXTURES_DIR/purescript" - - # Use env vars instead of --global to avoid polluting user's git config - export GIT_AUTHOR_NAME="pacchettibotti" - export GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" - export GIT_COMMITTER_NAME="pacchettibotti" - export GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" - - # Copy fixtures and make writable (nix store files are read-only) - cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" - cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" - chmod -R u+w "$FIXTURES_DIR/purescript" - - for repo in "$FIXTURES_DIR"/purescript/*/; do - cd "$repo" - git init -b master && git add . - GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ - GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ - git commit -m "Fixture commit" - git config receive.denyCurrentBranch ignore - done - - git -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 - git -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 - ''; + setupGitFixtures = pkgs.writeShellApplication { + name = "setup-git-fixtures"; + runtimeInputs = [ pkgs.git ]; + text = '' + FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" + + # Run git as pacchettibotti + gitbot() { + GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ + GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ + git "$@" + } + + # Remove any existing fixtures (they may have wrong permissions from nix store copy) + rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true + mkdir -p "$FIXTURES_DIR/purescript" + + # Copy fixtures and make writable (nix store files are read-only) + cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" + cp -r 
${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" + chmod -R u+w "$FIXTURES_DIR/purescript" + + for repo in "$FIXTURES_DIR"/purescript/*/; do + cd "$repo" + git init -b master && git add . + gitbot commit -m "Fixture commit" + git config receive.denyCurrentBranch ignore + done + + gitbot -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 + gitbot -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 + ''; + }; # Publish payload for testing publishPayload = pkgs.writeText "publish-effect.json" ( diff --git a/nix/test/integration.nix b/nix/test/integration.nix index 5f323a3f8..bc4f333e0 100644 --- a/nix/test/integration.nix +++ b/nix/test/integration.nix @@ -57,7 +57,10 @@ else set -e export HOME=$TMPDIR export STATE_DIR=$TMPDIR/state - export SERVER_PORT=${toString ports.server} + + # Export test environment variables for E2E test runners + ${testEnv.testConfig.envToExports testEnv.testConfig.testEnv} + mkdir -p $STATE_DIR # Start wiremock services diff --git a/nix/test/smoke.nix b/nix/test/smoke.nix index 53addca88..1365d8283 100644 --- a/nix/test/smoke.nix +++ b/nix/test/smoke.nix @@ -9,6 +9,7 @@ # - systemd services start and stay running # - The server responds to basic HTTP requests # - Database migrations run successfully +# - The job executor starts without errors { pkgs, lib, @@ -25,11 +26,14 @@ else testConfig = import ./config.nix { inherit pkgs lib rootPath; }; envVars = testConfig.testEnv; stateDir = "/var/lib/registry-server"; + repoFixturesDir = "${stateDir}/repo-fixtures"; in pkgs.testers.nixosTest { name = "registry-smoke"; testScript = '' + import time + # Start the registry VM registry.start() @@ -54,6 +58,14 @@ else # Check that the service is still running (didn't crash) registry.succeed("systemctl is-active server.service") + # Give the job executor a moment to start and potentially fail + time.sleep(2) + + # Check that the job executor 
started successfully and didn't fail + logs = registry.succeed("journalctl -u server.service --no-pager") + assert "Job executor failed:" not in logs, f"Job executor failed on startup. Logs:\n{logs}" + assert "Starting Job Executor" in logs, f"Job executor did not start. Logs:\n{logs}" + print("✓ Smoke test passed: server deployed and responding") ''; @@ -62,7 +74,8 @@ else (rootPath + "/nix/registry-server.nix") ]; - nixpkgs.overlays = overlays; + # Apply the git mock overlay on top of the standard overlays + nixpkgs.overlays = overlays ++ [ testConfig.gitMockOverlay ]; virtualisation = { graphics = false; @@ -70,12 +83,29 @@ else memorySize = 2048; }; + # Set up git fixtures before the server starts + systemd.services.setup-git-fixtures = { + description = "Set up git fixtures for smoke test"; + wantedBy = [ "server.service" ]; + before = [ "server.service" ]; + serviceConfig = { + Type = "oneshot"; + RemainAfterExit = true; + }; + script = '' + ${testConfig.setupGitFixtures}/bin/setup-git-fixtures ${repoFixturesDir} + ''; + }; + services.registry-server = { enable = true; host = "localhost"; port = lib.toInt envVars.SERVER_PORT; enableCerts = false; - inherit stateDir envVars; + inherit stateDir; + envVars = envVars // { + REPO_FIXTURES_DIR = repoFixturesDir; + }; }; }; } diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index 424f71364..f7d7fb058 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -95,7 +95,8 @@ let testEnvScript = pkgs.writeShellScriptBin "test-env" '' set -e - export SERVER_PORT="${toString ports.server}" + # Export test environment variables for E2E test runners + ${testConfig.envToExports testConfig.testEnv} if [ -z "''${STATE_DIR:-}" ]; then STATE_DIR="$(mktemp -d)" @@ -130,8 +131,8 @@ in wiremockStartScript serverStartScript setupGitFixtures - envVars - envFile + testEnv + envToExports ; # Full testConfig still available for less common access patterns diff --git a/package-lock.json b/package-lock.json index 
3e868b0c6..5c5c89ccd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -253,65 +253,65 @@ } }, "node_modules/@aws-sdk/client-s3": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.948.0.tgz", - "integrity": "sha512-uvEjds8aYA9SzhBS8RKDtsDUhNV9VhqKiHTcmvhM7gJO92q0WTn8/QeFTdNyLc6RxpiDyz+uBxS7PcdNiZzqfA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.955.0.tgz", + "integrity": "sha512-bFvSM6UB0R5hpWfXzHI3BlKwT2qYHto9JoDtzSr5FxVguTMzJyr+an11VT1Hi5wgO03luXEeXeloURFvaMs6TQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-node": "3.948.0", - "@aws-sdk/middleware-bucket-endpoint": "3.936.0", - "@aws-sdk/middleware-expect-continue": "3.936.0", - "@aws-sdk/middleware-flexible-checksums": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-location-constraint": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/middleware-ssec": "3.936.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/signature-v4-multi-region": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/eventstream-serde-browser": "^4.2.5", - "@smithy/eventstream-serde-config-resolver": "^4.3.5", - "@smithy/eventstream-serde-node": "^4.2.5", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-blob-browser": "^4.2.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/hash-stream-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", 
- "@smithy/md5-js": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-node": "3.955.0", + "@aws-sdk/middleware-bucket-endpoint": "3.953.0", + "@aws-sdk/middleware-expect-continue": "3.953.0", + "@aws-sdk/middleware-flexible-checksums": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-location-constraint": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/middleware-ssec": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/signature-v4-multi-region": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/eventstream-serde-browser": "^4.2.6", + "@smithy/eventstream-serde-config-resolver": "^4.3.6", + "@smithy/eventstream-serde-node": "^4.2.6", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-blob-browser": "^4.2.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/hash-stream-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/md5-js": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": 
"^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", - "@smithy/util-waiter": "^4.2.5", + "@smithy/util-waiter": "^4.2.6", "tslib": "^2.6.2" }, "engines": { @@ -319,47 +319,47 @@ } }, "node_modules/@aws-sdk/client-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.948.0.tgz", - "integrity": "sha512-iWjchXy8bIAVBUsKnbfKYXRwhLgRg3EqCQ5FTr3JbR+QR75rZm4ZOYXlvHGztVTmtAZ+PQVA1Y4zO7v7N87C0A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.955.0.tgz", + "integrity": "sha512-+nym5boDFt2ksba0fElocMKxCFJbJcd31PI3502hoI1N5VK7HyxkQeBtQJ64JYomvw8eARjWWC13hkB0LtZILw==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": 
"3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - 
"@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -368,22 +368,22 @@ } }, "node_modules/@aws-sdk/core": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.947.0.tgz", - "integrity": "sha512-Khq4zHhuAkvCFuFbgcy3GrZTzfSX7ZIjIcW1zRDxXRLZKRtuhnZdonqTUfaWi5K42/4OmxkYNpsO7X7trQOeHw==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/xml-builder": "3.930.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.954.0.tgz", + "integrity": "sha512-5oYO5RP+mvCNXNj8XnF9jZo0EP0LTseYOJVNQYcii1D9DJqzHL3HJWurYh7cXxz7G7eDyvVYA01O9Xpt34TdoA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.953.0", + "@aws-sdk/xml-builder": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -392,15 +392,15 @@ } }, "node_modules/@aws-sdk/credential-provider-env": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.947.0.tgz", - "integrity": 
"sha512-VR2V6dRELmzwAsCpK4GqxUi6UW5WNhAXS9F9AzWi5jvijwJo3nH92YNJUP4quMpgFZxJHEWyXLWgPjh9u0zYOA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.954.0.tgz", + "integrity": "sha512-2HNkqBjfsvyoRuPAiFh86JBFMFyaCNhL4VyH6XqwTGKZffjG7hdBmzXPy7AT7G3oFh1k/1Zc27v0qxaKoK7mBA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -408,20 +408,20 @@ } }, "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.947.0.tgz", - "integrity": "sha512-inF09lh9SlHj63Vmr5d+LmwPXZc2IbK8lAruhOr3KLsZAIHEgHgGPXWDC2ukTEMzg0pkexQ6FOhXXad6klK4RA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.954.0.tgz", + "integrity": "sha512-CrWD5300+NE1OYRnSVDxoG7G0b5cLIZb7yp+rNQ5Jq/kqnTmyJXpVAsivq+bQIDaGzPXhadzpAMIoo7K/aHaag==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/util-stream": "^4.5.7", "tslib": "^2.6.2" }, 
"engines": { @@ -429,24 +429,24 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.948.0.tgz", - "integrity": "sha512-Cl//Qh88e8HBL7yYkJNpF5eq76IO6rq8GsatKcfVBm7RFVxCqYEPSSBtkHdbtNwQdRQqAMXc6E/lEB/CZUDxnA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.955.0.tgz", + "integrity": "sha512-90isLovxsPzaaSx3IIUZuxym6VXrsRetnQ3AuHr2kiTFk2pIzyIwmi+gDcUaLXQ5nNBoSj1Z/4+i1vhxa1n2DQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-login": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-login": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -454,18 +454,18 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.948.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.948.0.tgz", - "integrity": "sha512-gcKO2b6eeTuZGp3Vvgr/9OxajMrD3W+FZ2FCyJox363ZgMoYJsyNid1vuZrEuAGkx0jvveLXfwiVS0UXyPkgtw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.955.0.tgz", + "integrity": "sha512-xlkmSvg8oDN5LIxLAq3N1QWK8F8gUAsBWZlp1IX8Lr5XhcKI3GVarIIUcZrvCy1NjzCd/LDXYdNL6MRlNP4bAw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -473,22 +473,22 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.948.0.tgz", - "integrity": "sha512-ep5vRLnrRdcsP17Ef31sNN4g8Nqk/4JBydcUJuFRbGuyQtrZZrVT81UeH2xhz6d0BK6ejafDB9+ZpBjXuWT5/Q==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.955.0.tgz", + "integrity": "sha512-XIL4QB+dPOJA6DRTmYZL52wFcLTslb7V1ydS4FCNT2DVLhkO4ExkPP+pe5YmIpzt/Our1ugS+XxAs3e6BtyFjA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-ini": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": 
"3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-ini": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -496,16 +496,16 @@ } }, "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.947.0.tgz", - "integrity": "sha512-WpanFbHe08SP1hAJNeDdBDVz9SGgMu/gc0XJ9u3uNpW99nKZjDpvPRAdW7WLA4K6essMjxWkguIGNOpij6Do2Q==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.954.0.tgz", + "integrity": "sha512-Y1/0O2LgbKM8iIgcVj/GNEQW6p90LVTCOzF2CI1pouoKqxmZ/1F7F66WHoa6XUOfKaCRj/R6nuMR3om9ThaM5A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -513,18 +513,18 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.948.0.tgz", - 
"integrity": "sha512-gqLhX1L+zb/ZDnnYbILQqJ46j735StfWV5PbDjxRzBKS7GzsiYoaf6MyHseEopmWrez5zl5l6aWzig7UpzSeQQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.955.0.tgz", + "integrity": "sha512-Y99KI73Fn8JnB4RY5Ls6j7rd5jmFFwnY9WLHIWeJdc+vfwL6Bb1uWKW3+m/B9+RC4Xoz2nQgtefBcdWq5Xx8iw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/client-sso": "3.948.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/token-providers": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/client-sso": "3.955.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/token-providers": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -532,17 +532,17 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.948.0.tgz", - "integrity": "sha512-MvYQlXVoJyfF3/SmnNzOVEtANRAiJIObEUYYyjTqKZTmcRIVVky0tPuG26XnB8LmTYgtESwJIZJj/Eyyc9WURQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.955.0.tgz", + "integrity": "sha512-+lFxkZ2Vz3qp/T68ZONKzWVTQvomTu7E6tts1dfAbEcDt62Y/nPCByq/C2hQj+TiN05HrUx+yTJaGHBklhkbqA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + 
"@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -550,16 +550,16 @@ } }, "node_modules/@aws-sdk/middleware-bucket-endpoint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.936.0.tgz", - "integrity": "sha512-XLSVVfAorUxZh6dzF+HTOp4R1B5EQcdpGcPliWr0KUj2jukgjZEcqbBmjyMF/p9bmyQsONX80iURF1HLAlW0qg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.953.0.tgz", + "integrity": "sha512-YHVRIOowtGIl/L2WuS83FgRlm31tU0aL1yryWaFtF+AFjA5BIeiFkxIZqaRGxJpJvFEBdohsyq6Ipv5mgWfezg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" }, @@ -568,14 +568,14 @@ } }, "node_modules/@aws-sdk/middleware-expect-continue": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.936.0.tgz", - "integrity": "sha512-Eb4ELAC23bEQLJmUMYnPWcjD3FZIsmz2svDiXEcxRkQU9r7NRID7pM7C5NPH94wOfiCk0b2Y8rVyFXW0lGQwbA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.953.0.tgz", + "integrity": "sha512-BQTVXrypQ0rbb7au/Hk4IS5GaJZlwk6O44Rjk6Kxb0IvGQhSurNTuesFiJx1sLbf+w+T31saPtODcfQQERqhCQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + 
"@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -583,22 +583,22 @@ } }, "node_modules/@aws-sdk/middleware-flexible-checksums": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.947.0.tgz", - "integrity": "sha512-kXXxS2raNESNO+zR0L4YInVjhcGGNI2Mx0AE1ThRhDkAt2se3a+rGf9equ9YvOqA1m8Jl/GSI8cXYvSxXmS9Ag==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.954.0.tgz", + "integrity": "sha512-hHOPDJyxucNodkgapLhA0VdwDBwVYN9DX20aA6j+3nwutAlZ5skaV7Bw0W3YC7Fh/ieDKKhcSZulONd4lVTwMg==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", "@smithy/is-array-buffer": "^4.2.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -607,14 +607,14 @@ } }, "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.936.0.tgz", - "integrity": "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.953.0.tgz", + "integrity": "sha512-jTGhfkONav+r4E6HLOrl5SzBqDmPByUYCkyB/c/3TVb8jX3wAZx8/q9bphKpCh+G5ARi3IdbSisgkZrJYqQ19Q==", "license": "Apache-2.0", 
"dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -622,13 +622,13 @@ } }, "node_modules/@aws-sdk/middleware-location-constraint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.936.0.tgz", - "integrity": "sha512-SCMPenDtQMd9o5da9JzkHz838w3327iqXk3cbNnXWqnNRx6unyW8FL0DZ84gIY12kAyVHz5WEqlWuekc15ehfw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.953.0.tgz", + "integrity": "sha512-h0urrbteIQEybyIISaJfQLZ/+/lJPRzPWAQT4epvzfgv/4MKZI7K83dK7SfTwAooVKFBHiCMok2Cf0iHDt07Kw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -636,13 +636,13 @@ } }, "node_modules/@aws-sdk/middleware-logger": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.936.0.tgz", - "integrity": "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.953.0.tgz", + "integrity": "sha512-PlWdVYgcuptkIC0ZKqVUhWNtSHXJSx7U9V8J7dJjRmsXC40X7zpEycvrkzDMJjeTDGcCceYbyYAg/4X1lkcIMw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -650,15 +650,15 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.948.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.948.0.tgz", - "integrity": "sha512-Qa8Zj+EAqA0VlAVvxpRnpBpIWJI9KUwaioY1vkeNVwXPlNaz9y9zCKVM9iU9OZ5HXpoUg6TnhATAHXHAE8+QsQ==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.953.0.tgz", + "integrity": "sha512-cmIJx0gWeesUKK4YwgE+VQL3mpACr3/J24fbwnc1Z5tntC86b+HQFzU5vsBDw6lLwyD46dBgWdsXFh1jL+ZaFw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", + "@aws-sdk/types": "3.953.0", "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -666,23 +666,23 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.947.0.tgz", - "integrity": "sha512-DS2tm5YBKhPW2PthrRBDr6eufChbwXe0NjtTZcYDfUCXf0OR+W6cIqyKguwHMJ+IyYdey30AfVw9/Lb5KB8U8A==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.954.0.tgz", + "integrity": "sha512-274CNmnRjknmfFb2o0Azxic54fnujaA8AYSeRUOho3lN48TVzx85eAFWj2kLgvUJO88pE3jBDPWboKQiQdXeUQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + 
"@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -691,13 +691,13 @@ } }, "node_modules/@aws-sdk/middleware-ssec": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.936.0.tgz", - "integrity": "sha512-/GLC9lZdVp05ozRik5KsuODR/N7j+W+2TbfdFL3iS+7un+gnP6hC8RDOZd6WhpZp7drXQ9guKiTAxkZQwzS8DA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.953.0.tgz", + "integrity": "sha512-OrhG1kcQ9zZh3NS3RovR028N0+UndQ957zF1k5HPLeFLwFwQN1uPOufzzPzAyXIIKtR69ARFsQI4mstZS4DMvw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -705,17 +705,17 @@ } }, "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.947.0.tgz", - "integrity": "sha512-7rpKV8YNgCP2R4F9RjWZFcD2R+SO/0R4VHIbY9iZJdH2MzzJ8ZG7h8dZ2m8QkQd1fjx4wrFJGGPJUTYXPV3baA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.954.0.tgz", + "integrity": "sha512-5PX8JDe3dB2+MqXeGIhmgFnm2rbVsSxhz+Xyuu1oxLtbOn+a9UDA+sNBufEBjt3UxWy5qwEEY1fxdbXXayjlGg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@smithy/core": "^3.18.7", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", 
+ "@smithy/core": "^3.19.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -723,47 +723,47 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.948.0.tgz", - "integrity": "sha512-zcbJfBsB6h254o3NuoEkf0+UY1GpE9ioiQdENWv7odo69s8iaGBEQ4BDpsIMqcuiiUXw1uKIVNxCB1gUGYz8lw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.955.0.tgz", + "integrity": "sha512-RBi6CQHbPF09kqXAoiEOOPkVnSoU5YppKoOt/cgsWfoMHwC+7itIrEv+yRD62h14jIjF3KngVIQIrBRbX3o3/Q==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + 
"@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -772,15 +772,15 @@ } }, "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.936.0.tgz", - "integrity": "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.953.0.tgz", + 
"integrity": "sha512-5MJgnsc+HLO+le0EK1cy92yrC7kyhGZSpaq8PcQvKs9qtXCXT5Tb6tMdkr5Y07JxYsYOV1omWBynvL6PWh08tQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -788,16 +788,16 @@ } }, "node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.947.0.tgz", - "integrity": "sha512-UaYmzoxf9q3mabIA2hc4T6x5YSFUG2BpNjAZ207EA1bnQMiK+d6vZvb83t7dIWL/U1de1sGV19c1C81Jf14rrA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.954.0.tgz", + "integrity": "sha512-GJJbUaSlGrMSRWui3Oz8ByygpQlzDGm195yTKirgGyu4tfYrFr/QWrWT42EUktY/L4Irev1pdHTuLS+AGHO1gw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -805,17 +805,17 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.948.0.tgz", - "integrity": "sha512-V487/kM4Teq5dcr1t5K6eoUKuqlGr9FRWL3MIMukMERJXHZvio6kox60FZ/YtciRHRI75u14YUqm2Dzddcu3+A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.955.0.tgz", + "integrity": 
"sha512-LVpWkxXvMPgZofP2Gc8XBfQhsyecBMVARDHWMvks6vPbCLSTM7dw6H1HI9qbGNCurYcyc2xBRAkEDhChQlbPPg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -823,12 +823,12 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.936.0.tgz", - "integrity": "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.953.0.tgz", + "integrity": "sha512-M9Iwg9kTyqTErI0vOTVVpcnTHWzS3VplQppy8MuL02EE+mJ0BIwpWfsaAPQW+/XnVpdNpWZTsHcNE29f1+hR8g==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -836,9 +836,9 @@ } }, "node_modules/@aws-sdk/util-arn-parser": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.893.0.tgz", - "integrity": "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.953.0.tgz", + "integrity": "sha512-9hqdKkn4OvYzzaLryq2xnwcrPc8ziY34i9szUdgBfSqEC6pBxbY9/lLXmrgzfwMSL2Z7/v2go4Od0p5eukKLMQ==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -848,15 +848,15 @@ } }, "node_modules/@aws-sdk/util-endpoints": { - "version": "3.936.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.936.0.tgz", - "integrity": "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.953.0.tgz", + "integrity": "sha512-rjaS6jrFksopXvNg6YeN+D1lYwhcByORNlFuYesFvaQNtPOufbE5tJL4GJ3TMXyaY0uFR28N5BHHITPyWWfH/g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-endpoints": "^3.2.5", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", + "@smithy/util-endpoints": "^3.2.6", "tslib": "^2.6.2" }, "engines": { @@ -864,9 +864,9 @@ } }, "node_modules/@aws-sdk/util-locate-window": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.893.0.tgz", - "integrity": "sha512-T89pFfgat6c8nMmpI8eKjBcDcgJq36+m9oiXbcUzeU55MP9ZuGgBomGjGnHaEyF36jenW9gmg3NfZDm0AO2XPg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.953.0.tgz", + "integrity": "sha512-mPxK+I1LcrgC/RSa3G5AMAn8eN2Ay0VOgw8lSRmV1jCtO+iYvNeCqOdxoJUjOW6I5BA4niIRWqVORuRP07776Q==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -876,27 +876,27 @@ } }, "node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.936.0.tgz", - "integrity": "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.953.0.tgz", + "integrity": "sha512-UF5NeqYesWuFao+u7LJvpV1SJCaLml5BtFZKUdTnNNMeN6jvV+dW/eQoFGpXF94RCqguX0XESmRuRRPQp+/rzQ==", "license": 
"Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.947.0.tgz", - "integrity": "sha512-+vhHoDrdbb+zerV4noQk1DHaUMNzWFWPpPYjVTwW2186k5BEJIecAMChYkghRrBVJ3KPWP1+JnZwOd72F3d4rQ==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.954.0.tgz", + "integrity": "sha512-fB5S5VOu7OFkeNzcblQlez4AjO5hgDFaa7phYt7716YWisY3RjAaQPlxgv+G3GltHHDJIfzEC5aRxdf62B9zMg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -912,12 +912,12 @@ } }, "node_modules/@aws-sdk/xml-builder": { - "version": "3.930.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.930.0.tgz", - "integrity": "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.953.0.tgz", + "integrity": "sha512-Zmrj21jQ2OeOJGr9spPiN00aQvXa/WUqRXcTVENhrMt+OFoSOfDFpYhUj9NQ09QmQ8KMWFoWuWW6iKurNqLvAA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" }, @@ -1169,12 +1169,12 @@ } }, "node_modules/@smithy/abort-controller": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.5.tgz", - 
"integrity": "sha512-j7HwVkBw68YW8UmFRcjZOmssE77Rvk0GWAIN1oFBhsaovQmZWYCIcGa9/pwRB0ExI8Sk9MWNALTjftjHZea7VA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.7.tgz", + "integrity": "sha512-rzMY6CaKx2qxrbYbqjXWS0plqEy7LOdKHS0bg4ixJ6aoGDPNUcLWk/FRNuCILh7GKLG9TFUXYYeQQldMBBwuyw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1207,16 +1207,16 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.3.tgz", - "integrity": "sha512-ezHLe1tKLUxDJo2LHtDuEDyWXolw8WGOR92qb4bQdWq/zKenO5BvctZGrVJBK08zjezSk7bmbKFOXIVyChvDLw==", + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.5.tgz", + "integrity": "sha512-HAGoUAFYsUkoSckuKbCPayECeMim8pOu+yLy1zOxt1sifzEbrsRpYa+mKcMdiHKMeiqOibyPG0sFJnmaV/OGEg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-endpoints": "^3.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1224,18 +1224,18 @@ } }, "node_modules/@smithy/core": { - "version": "3.18.7", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.18.7.tgz", - "integrity": "sha512-axG9MvKhMWOhFbvf5y2DuyTxQueO0dkedY9QC3mAfndLosRI/9LJv8WaL0mw7ubNhsO4IuXX9/9dYGPFvHrqlw==", + "version": "3.20.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.20.0.tgz", + "integrity": "sha512-WsSHCPq/neD5G/MkK4csLI5Y5Pkd9c1NMfpYEKeghSGaD4Ja1qLIohRQf2D5c1Uy5aXp76DeKHkzWZ9KAlHroQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/middleware-serde": 
"^4.2.6", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-stream": "^4.5.8", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" @@ -1245,15 +1245,15 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.5.tgz", - "integrity": "sha512-BZwotjoZWn9+36nimwm/OLIcVe+KYRwzMjfhd4QT7QxPm9WY0HiOV8t/Wlh+HVUif0SBVV7ksq8//hPaBC/okQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.7.tgz", + "integrity": "sha512-CmduWdCiILCRNbQWFR0OcZlUPVtyE49Sr8yYL0rZQ4D/wKxiNzBNS/YHemvnbkIWj623fplgkexUd/c9CAKdoA==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1261,13 +1261,13 @@ } }, "node_modules/@smithy/eventstream-codec": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.5.tgz", - "integrity": "sha512-Ogt4Zi9hEbIP17oQMd68qYOHUzmH47UkK7q7Gl55iIm9oKt27MUGrC5JfpMroeHjdkOliOA4Qt3NQ1xMq/nrlA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.7.tgz", + "integrity": "sha512-DrpkEoM3j9cBBWhufqBwnbbn+3nf1N9FP6xuVJ+e220jbactKuQgaZwjwP5CP1t+O94brm2JgVMD2atMGX3xIQ==", "license": 
"Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", "tslib": "^2.6.2" }, @@ -1276,13 +1276,13 @@ } }, "node_modules/@smithy/eventstream-serde-browser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.5.tgz", - "integrity": "sha512-HohfmCQZjppVnKX2PnXlf47CW3j92Ki6T/vkAT2DhBR47e89pen3s4fIa7otGTtrVxmj7q+IhH0RnC5kpR8wtw==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.7.tgz", + "integrity": "sha512-ujzPk8seYoDBmABDE5YqlhQZAXLOrtxtJLrbhHMKjBoG5b4dK4i6/mEU+6/7yXIAkqOO8sJ6YxZl+h0QQ1IJ7g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1290,12 +1290,12 @@ } }, "node_modules/@smithy/eventstream-serde-config-resolver": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.5.tgz", - "integrity": "sha512-ibjQjM7wEXtECiT6my1xfiMH9IcEczMOS6xiCQXoUIYSj5b1CpBbJ3VYbdwDy8Vcg5JHN7eFpOCGk8nyZAltNQ==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.7.tgz", + "integrity": "sha512-x7BtAiIPSaNaWuzm24Q/mtSkv+BrISO/fmheiJ39PKRNH3RmH2Hph/bUKSOBOBC9unqfIYDhKTHwpyZycLGPVQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1303,13 +1303,13 @@ } }, "node_modules/@smithy/eventstream-serde-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.5.tgz", - "integrity": 
"sha512-+elOuaYx6F2H6x1/5BQP5ugv12nfJl66GhxON8+dWVUEDJ9jah/A0tayVdkLRP0AeSac0inYkDz5qBFKfVp2Gg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.7.tgz", + "integrity": "sha512-roySCtHC5+pQq5lK4be1fZ/WR6s/AxnPaLfCODIPArtN2du8s5Ot4mKVK3pPtijL/L654ws592JHJ1PbZFF6+A==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1317,13 +1317,13 @@ } }, "node_modules/@smithy/eventstream-serde-universal": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.5.tgz", - "integrity": "sha512-G9WSqbST45bmIFaeNuP/EnC19Rhp54CcVdX9PDL1zyEB514WsDVXhlyihKlGXnRycmHNmVv88Bvvt4EYxWef/Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.7.tgz", + "integrity": "sha512-QVD+g3+icFkThoy4r8wVFZMsIP08taHVKjE6Jpmz8h5CgX/kk6pTODq5cht0OMtcapUx+xrPzUTQdA+TmO0m1g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-codec": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-codec": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1331,14 +1331,14 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.6", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.6.tgz", - "integrity": "sha512-3+RG3EA6BBJ/ofZUeTFJA7mHfSYrZtQIrDP9dI8Lf7X6Jbos2jptuLrAAteDiFVrmbEmLSuRG/bUKzfAXk7dhg==", + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.8.tgz", + "integrity": "sha512-h/Fi+o7mti4n8wx1SR6UHWLaakwHRx29sizvp8OOm7iqwKGFneT06GCSFhml6Bha5BT6ot5pj3CYZnCHhGC2Rg==", "license": "Apache-2.0", "dependencies": { - 
"@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "tslib": "^2.6.2" }, @@ -1347,14 +1347,14 @@ } }, "node_modules/@smithy/hash-blob-browser": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.6.tgz", - "integrity": "sha512-8P//tA8DVPk+3XURk2rwcKgYwFvwGwmJH/wJqQiSKwXZtf/LiZK+hbUZmPj/9KzM+OVSwe4o85KTp5x9DUZTjw==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.8.tgz", + "integrity": "sha512-07InZontqsM1ggTCPSRgI7d8DirqRrnpL7nIACT4PW0AWrgDiHhjGZzbAE5UtRSiU0NISGUYe7/rri9ZeWyDpw==", "license": "Apache-2.0", "dependencies": { "@smithy/chunked-blob-reader": "^5.2.0", "@smithy/chunked-blob-reader-native": "^4.2.1", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1362,12 +1362,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.5.tgz", - "integrity": "sha512-DpYX914YOfA3UDT9CN1BM787PcHfWRBB43fFGCYrZFUH0Jv+5t8yYl+Pd5PW4+QzoGEDvn5d5QIO4j2HyYZQSA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.7.tgz", + "integrity": "sha512-PU/JWLTBCV1c8FtB8tEFnY4eV1tSfBc7bDBADHfn1K+uRbPgSJ9jnJp0hyjiFN2PMdPzxsf1Fdu0eo9fJ760Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1377,12 +1377,12 @@ } }, "node_modules/@smithy/hash-stream-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.5.tgz", - "integrity": 
"sha512-6+do24VnEyvWcGdHXomlpd0m8bfZePpUKBy7m311n+JuRwug8J4dCanJdTymx//8mi0nlkflZBvJe+dEO/O12Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.7.tgz", + "integrity": "sha512-ZQVoAwNYnFMIbd4DUc517HuwNelJUY6YOzwqrbcAgCnVn+79/OK7UjwA93SPpdTOpKDVkLIzavWm/Ck7SmnDPQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1391,12 +1391,12 @@ } }, "node_modules/@smithy/invalid-dependency": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.5.tgz", - "integrity": "sha512-2L2erASEro1WC5nV+plwIMxrTXpvpfzl4e+Nre6vBVRR2HKeGGcvpJyyL3/PpiSg+cJG2KpTmZmq934Olb6e5A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.7.tgz", + "integrity": "sha512-ncvgCr9a15nPlkhIUx3CU4d7E7WEuVJOV7fS7nnK2hLtPK9tYRBkMHQbhXU1VvvKeBm/O0x26OEoBq+ngFpOEQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1416,12 +1416,12 @@ } }, "node_modules/@smithy/md5-js": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.5.tgz", - "integrity": "sha512-Bt6jpSTMWfjCtC0s79gZ/WZ1w90grfmopVOWqkI2ovhjpD5Q2XRXuecIPB9689L2+cCySMbaXDhBPU56FKNDNg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.7.tgz", + "integrity": "sha512-Wv6JcUxtOLTnxvNjDnAiATUsk8gvA6EeS8zzHig07dotpByYsLot+m0AaQEniUBjx97AC41MQR4hW0baraD1Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1430,13 +1430,13 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.5", - "resolved": 
"https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.5.tgz", - "integrity": "sha512-Y/RabVa5vbl5FuHYV2vUCwvh/dqzrEY/K2yWPSqvhFUwIY0atLqO4TienjBXakoy4zrKAMCZwg+YEqmH7jaN7A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.7.tgz", + "integrity": "sha512-GszfBfCcvt7kIbJ41LuNa5f0wvQCHhnGx/aDaZJCCT05Ld6x6U2s0xsc/0mBFONBZjQJp2U/0uSJ178OXOwbhg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1444,18 +1444,18 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.3.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.3.14.tgz", - "integrity": "sha512-v0q4uTKgBM8dsqGjqsabZQyH85nFaTnFcgpWU1uydKFsdyyMzfvOkNum9G7VK+dOP01vUnoZxIeRiJ6uD0kjIg==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.1.tgz", + "integrity": "sha512-gpLspUAoe6f1M6H0u4cVuFzxZBrsGZmjx2O9SigurTx4PbntYa4AJ+o0G0oGm1L2oSX6oBhcGHwrfJHup2JnJg==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/core": "^3.20.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1463,18 +1463,18 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.4.14", - "resolved": 
"https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.14.tgz", - "integrity": "sha512-Z2DG8Ej7FyWG1UA+7HceINtSLzswUgs2np3sZX0YBBxCt+CXG4QUxv88ZDS3+2/1ldW7LqtSY1UO/6VQ1pND8Q==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/service-error-classification": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "version": "4.4.17", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.17.tgz", + "integrity": "sha512-MqbXK6Y9uq17h+4r0ogu/sBT6V/rdV+5NvYL7ZV444BKfQygYe8wAhDrVXagVebN6w2RE0Fm245l69mOsPGZzg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-retry": "^4.2.7", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" }, @@ -1483,13 +1483,13 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.6.tgz", - "integrity": "sha512-VkLoE/z7e2g8pirwisLz8XJWedUSY8my/qrp81VmAdyrhi94T+riBfwP+AOEEFR9rFTSonC/5D2eWNmFabHyGQ==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.8.tgz", + "integrity": "sha512-8rDGYen5m5+NV9eHv9ry0sqm2gI6W7mc1VSFMtn6Igo25S507/HaOX9LTHAS2/J32VXD0xSzrY0H5FJtOMS4/w==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1497,12 +1497,12 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.5", - "resolved": 
"https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.5.tgz", - "integrity": "sha512-bYrutc+neOyWxtZdbB2USbQttZN0mXaOyYLIsaTbJhFsfpXyGWUxJpEuO1rJ8IIJm2qH4+xJT0mxUSsEDTYwdQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.7.tgz", + "integrity": "sha512-bsOT0rJ+HHlZd9crHoS37mt8qRRN/h9jRve1SXUhVbkRzu0QaNYZp1i1jha4n098tsvROjcwfLlfvcFuJSXEsw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1510,14 +1510,14 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.5.tgz", - "integrity": "sha512-UTurh1C4qkVCtqggI36DGbLB2Kv8UlcFdMXDcWMbqVY2uRg0XmT9Pb4Vj6oSQ34eizO1fvR0RnFV4Axw4IrrAg==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.7.tgz", + "integrity": "sha512-7r58wq8sdOcrwWe+klL9y3bc4GW1gnlfnFOuL7CXa7UzfhzhxKuzNdtqgzmTV+53lEp9NXh5hY/S4UgjLOzPfw==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1525,15 +1525,15 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.4.5", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.5.tgz", - "integrity": "sha512-CMnzM9R2WqlqXQGtIlsHMEZfXKJVTIrqCNoSd/QpAyp+Dw0a1Vps13l6ma1fH8g7zSPNsA59B/kWgeylFuA/lw==", + "version": "4.4.7", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.7.tgz", + "integrity": "sha512-NELpdmBOO6EpZtWgQiHjoShs1kmweaiNuETUpuup+cmm/xJYjT4eUjfhrXRP4jCOaAsS3c3yPsP3B+K+/fyPCQ==", "license": 
"Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1541,12 +1541,12 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.5.tgz", - "integrity": "sha512-8iLN1XSE1rl4MuxvQ+5OSk/Zb5El7NJZ1td6Tn+8dQQHIjp59Lwl6bd0+nzw6SKm2wSSriH2v/I9LPzUic7EOg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.7.tgz", + "integrity": "sha512-jmNYKe9MGGPoSl/D7JDDs1C8b3dC8f/w78LbaVfoTtWy4xAd5dfjaFG9c9PWPihY4ggMQNQSMtzU77CNgAJwmA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1554,12 +1554,12 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.5.tgz", - "integrity": "sha512-RlaL+sA0LNMp03bf7XPbFmT5gN+w3besXSWMkA8rcmxLSVfiEXElQi4O2IWwPfxzcHkxqrwBFMbngB8yx/RvaQ==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.7.tgz", + "integrity": "sha512-1r07pb994I20dD/c2seaZhoCuNYm0rWrvBxhCQ70brNh11M5Ml2ew6qJVo0lclB3jMIXirD4s2XRXRe7QEi0xA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1567,12 +1567,12 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.5.tgz", - "integrity": 
"sha512-y98otMI1saoajeik2kLfGyRp11e5U/iJYH/wLCh3aTV/XutbGT9nziKGkgCaMD1ghK7p6htHMm6b6scl9JRUWg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.7.tgz", + "integrity": "sha512-eKONSywHZxK4tBxe2lXEysh8wbBdvDWiA+RIuaxZSgCMmA0zMgoDpGLJhnyj+c0leOQprVnXOmcB4m+W9Rw7sg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-uri-escape": "^4.2.0", "tslib": "^2.6.2" }, @@ -1581,12 +1581,12 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.5.tgz", - "integrity": "sha512-031WCTdPYgiQRYNPXznHXof2YM0GwL6SeaSyTH/P72M1Vz73TvCNH2Nq8Iu2IEPq9QP2yx0/nrw5YmSeAi/AjQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.7.tgz", + "integrity": "sha512-3X5ZvzUHmlSTHAXFlswrS6EGt8fMSIxX/c3Rm1Pni3+wYWB6cjGocmRIoqcQF9nU5OgGmL0u7l9m44tSUpfj9w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1594,24 +1594,24 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.5.tgz", - "integrity": "sha512-8fEvK+WPE3wUAcDvqDQG1Vk3ANLR8Px979te96m84CbKAjBVf25rPYSzb4xU4hlTyho7VhOGnh5i62D/JVF0JQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.7.tgz", + "integrity": "sha512-YB7oCbukqEb2Dlh3340/8g8vNGbs/QsNNRms+gv3N2AtZz9/1vSBx6/6tpwQpZMEJFs7Uq8h4mmOn48ZZ72MkA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0" + "@smithy/types": "^4.11.0" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.0", - 
"resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.0.tgz", - "integrity": "sha512-5WmZ5+kJgJDjwXXIzr1vDTG+RhF9wzSODQBfkrQ2VVkYALKGvZX1lgVSxEkgicSAFnFhPj5rudJV0zoinqS0bA==", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.2.tgz", + "integrity": "sha512-M7iUUff/KwfNunmrgtqBfvZSzh3bmFgv/j/t1Y1dQ+8dNo34br1cqVEqy6v0mYEgi0DkGO7Xig0AnuOaEGVlcg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1619,16 +1619,16 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.5.tgz", - "integrity": "sha512-xSUfMu1FT7ccfSXkoLl/QRQBi2rOvi3tiBZU2Tdy3I6cgvZ6SEi9QNey+lqps/sJRnogIS+lq+B1gxxbra2a/w==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.7.tgz", + "integrity": "sha512-9oNUlqBlFZFOSdxgImA6X5GFuzE7V2H7VG/7E70cdLhidFbdtvxxt81EHgykGK5vq5D3FafH//X+Oy31j3CKOg==", "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.7", "@smithy/util-uri-escape": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1638,17 +1638,17 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.9.10", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.9.10.tgz", - "integrity": "sha512-Jaoz4Jw1QYHc1EFww/E6gVtNjhoDU+gwRKqXP6C3LKYqqH2UQhP8tMP3+t/ePrhaze7fhLE8vS2q6vVxBANFTQ==", + "version": "4.10.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.10.2.tgz", + "integrity": 
"sha512-D5z79xQWpgrGpAHb054Fn2CCTQZpog7JELbVQ6XAvXs5MNKWf28U9gzSBlJkOyMl9LA1TZEjRtwvGXfP0Sl90g==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@smithy/core": "^3.20.0", + "@smithy/middleware-endpoint": "^4.4.1", + "@smithy/middleware-stack": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", + "@smithy/util-stream": "^4.5.8", "tslib": "^2.6.2" }, "engines": { @@ -1656,9 +1656,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.9.0.tgz", - "integrity": "sha512-MvUbdnXDTwykR8cB1WZvNNwqoWVaTRA0RLlLmf/cIFNMM2cKWz01X4Ly6SMC4Kks30r8tT3Cty0jmeWfiuyHTA==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.11.0.tgz", + "integrity": "sha512-mlrmL0DRDVe3mNrjTcVcZEgkFmufITfUAPBEA+AHYiIeYyJebso/He1qLbP3PssRe22KUzLRpQSdBPbXdgZ2VA==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -1668,13 +1668,13 @@ } }, "node_modules/@smithy/url-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.5.tgz", - "integrity": "sha512-VaxMGsilqFnK1CeBX+LXnSuaMx4sTL/6znSZh2829txWieazdVxr54HmiyTsIbpOTLcf5nYpq9lpzmwRdxj6rQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.7.tgz", + "integrity": "sha512-/RLtVsRV4uY3qPWhBDsjwahAtt3x2IsMGnP5W1b2VZIe+qgCqkLxI1UOHDZp1Q1QSOrdOR32MF3Ph2JfWT1VHg==", "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/querystring-parser": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1745,14 +1745,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.13", - "resolved": 
"https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.13.tgz", - "integrity": "sha512-hlVLdAGrVfyNei+pKIgqDTxfu/ZI2NSyqj4IDxKd5bIsIqwR/dSlkxlPaYxFiIaDVrBy0he8orsFy+Cz119XvA==", + "version": "4.3.16", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.16.tgz", + "integrity": "sha512-/eiSP3mzY3TsvUOYMeL4EqUX6fgUOj2eUOU4rMMgVbq67TiRLyxT7Xsjxq0bW3OwuzK009qOwF0L2OgJqperAQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1760,17 +1760,17 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.16", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.16.tgz", - "integrity": "sha512-F1t22IUiJLHrxW9W1CQ6B9PN+skZ9cqSuzB18Eh06HrJPbjsyZ7ZHecAKw80DQtyGTRcVfeukKaCRYebFwclbg==", + "version": "4.2.19", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.19.tgz", + "integrity": "sha512-3a4+4mhf6VycEJyHIQLypRbiwG6aJvbQAeRAVXydMmfweEPnLLabRbdyo/Pjw8Rew9vjsh5WCdhmDaHkQnhhhA==", "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.3", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/config-resolver": "^4.4.5", + "@smithy/credential-provider-imds": "^4.2.7", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1778,13 +1778,13 @@ } }, "node_modules/@smithy/util-endpoints": { - "version": "3.2.5", - 
"resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.5.tgz", - "integrity": "sha512-3O63AAWu2cSNQZp+ayl9I3NapW1p1rR5mlVHcF6hAB1dPZUQFfRPYtplWX/3xrzWthPGj5FqB12taJJCfH6s8A==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.7.tgz", + "integrity": "sha512-s4ILhyAvVqhMDYREeTS68R43B1V5aenV5q/V1QpRQJkCXib5BPRo4s7uNdzGtIKxaPHCfU/8YkvPAEvTpxgspg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1804,12 +1804,12 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.5.tgz", - "integrity": "sha512-6Y3+rvBF7+PZOc40ybeZMcGln6xJGVeY60E7jy9Mv5iKpMJpHgRE6dKy9ScsVxvfAYuEX4Q9a65DQX90KaQ3bA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.7.tgz", + "integrity": "sha512-i1IkpbOae6NvIKsEeLLM9/2q4X+M90KV3oCFgWQI4q0Qz+yUZvsr+gZPdAEAtFhWQhAHpTsJO8DRJPuwVyln+w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1817,13 +1817,13 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.5.tgz", - "integrity": "sha512-GBj3+EZBbN4NAqJ/7pAhsXdfzdlznOh8PydUijy6FpNIMnHPSMO2/rP4HKu+UFeikJxShERk528oy7GT79YiJg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.7.tgz", + "integrity": "sha512-SvDdsQyF5CIASa4EYVT02LukPHVzAgUA4kMAuZ97QJc2BpAqZfA4PINB8/KOoCXEw9tsuv/jQjMeaHFvxdLNGg==", "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.5", - "@smithy/types": "^4.9.0", + 
"@smithy/service-error-classification": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1831,14 +1831,14 @@ } }, "node_modules/@smithy/util-stream": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.6.tgz", - "integrity": "sha512-qWw/UM59TiaFrPevefOZ8CNBKbYEP6wBAIlLqxn3VAIo9rgnTNc4ASbVrqDmhuwI87usnjhdQrxodzAGFFzbRQ==", + "version": "4.5.8", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.8.tgz", + "integrity": "sha512-ZnnBhTapjM0YPGUSmOs0Mcg/Gg87k503qG4zU2v/+Js2Gu+daKOJMeqcQns8ajepY8tgzzfYxl6kQyZKml6O2w==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/types": "^4.9.0", + "@smithy/fetch-http-handler": "^5.3.8", + "@smithy/node-http-handler": "^4.4.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", @@ -1875,13 +1875,13 @@ } }, "node_modules/@smithy/util-waiter": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.5.tgz", - "integrity": "sha512-Dbun99A3InifQdIrsXZ+QLcC0PGBPAdrl4cj1mTgJvyc9N2zf7QSxg8TBkzsCmGJdE3TLbO9ycwpY0EkWahQ/g==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.7.tgz", + "integrity": "sha512-vHJFXi9b7kUEpHWUCY3Twl+9NPOZvQ0SAi+Ewtn48mbiJk4JY9MZmKQjGB4SCvVb9WPiSphZJYY6RIbs+grrzw==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -2193,9 +2193,9 @@ "license": "MIT" }, "node_modules/fs-extra": { - "version": "11.3.2", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", - "integrity": 
"sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", "license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index d642d41dc..910233047 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -59,7 +59,6 @@ import Registry.App.CLI.Purs as Purs import Registry.App.CLI.PursVersions as PursVersions import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -76,6 +75,7 @@ import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestVali import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit @@ -188,7 +188,6 @@ main = launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) # Cache.interpret API._compilerCache (Cache.handleFs cache) # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' @@ -300,7 +299,7 @@ runLegacyImport 
logs = do Just ref -> pure ref Log.debug "Building dependency index with compiler versions..." - compilerIndex <- API.readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex Log.debug $ "Solving dependencies for " <> formatted eitherResolutions <- do @@ -405,7 +404,7 @@ runLegacyImport logs = do Log.debug "Downloading dependencies..." let installDir = Path.concat [ tmp, ".registry" ] FS.Extra.ensureDirectory installDir - API.installBuildPlan resolutions installDir + MatrixBuilder.installBuildPlan resolutions installDir Log.debug $ "Installed to " <> installDir Log.debug "Trying compilers one-by-one..." selected <- findFirstCompiler @@ -471,6 +470,7 @@ runLegacyImport logs = do { name: manifest.name , location: Just manifest.location , ref + , version: manifest.version , compiler , resolutions: Just resolutions } diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index f0cb1c63f..399af2e93 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -20,7 +20,6 @@ import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -158,7 +157,6 @@ main = launchAff_ do >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) >>> Cache.interpret _compilerCache (Cache.handleFs cache) - >>> Comment.interpret Comment.handleLog >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) >>> Env.runResourceEnv resourceEnv >>> Run.runBaseAff' @@ -239,10 +237,11 @@ deleteVersion arguments name version = do Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" Just (Right specificPackageMetadata) -> do -- Obtains `newMetadata` 
via cache - API.publish Nothing + void $ API.publish Nothing { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref + , version: version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing } diff --git a/scripts/src/PackageSetUpdater.purs b/scripts/src/PackageSetUpdater.purs index 95053eed1..29423cf7b 100644 --- a/scripts/src/PackageSetUpdater.purs +++ b/scripts/src/PackageSetUpdater.purs @@ -19,7 +19,6 @@ import Node.Path as Path import Node.Process as Process import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) @@ -114,7 +113,6 @@ main = Aff.launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/PackageTransferrer.purs b/scripts/src/PackageTransferrer.purs index d203c66de..31e859197 100644 --- a/scripts/src/PackageTransferrer.purs +++ b/scripts/src/PackageTransferrer.purs @@ -16,7 +16,6 @@ import Registry.App.API as API import Registry.App.Auth as Auth import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -87,7 +86,6 @@ main = launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret 
Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runPacchettiBottiEnv { privateKey, publicKey } # Env.runResourceEnv resourceEnv diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index 8fa9a7070..51f2ef993 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -17,7 +17,6 @@ import Data.DateTime.Instant as Instant import Data.Foldable (foldMap) import Data.Formatter.DateTime as Formatter.DateTime import Data.Map as Map -import Data.Newtype (unwrap) import Data.String as String import Data.Time.Duration (Milliseconds(..)) import Effect.Class.Console as Aff @@ -32,7 +31,6 @@ import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -151,7 +149,6 @@ main = launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Env.runResourceEnv resourceEnv # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Run.runBaseAff' diff --git a/spago.lock b/spago.lock index 83d2afb8d..a6dbae907 100644 --- a/spago.lock +++ b/spago.lock @@ -313,12 +313,20 @@ "dependencies": [ "aff", "arrays", + "codec-json", "console", "datetime", "effect", "either", + "foldable-traversable", + "json", "maybe", + "node-fs", + "node-path", + "node-process", "prelude", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", "spec", @@ -327,6 +335,7 @@ ], "build_plan": [ "aff", + "aff-promise", "ansi", "argonaut-codecs", "argonaut-core", @@ -334,6 +343,7 @@ "arrays", "assert", "avar", + 
"b64", "bifunctors", "catenable-lists", "codec", @@ -342,15 +352,21 @@ "const", "contravariant", "control", + "convertable-options", "datetime", + "debug", "distributive", + "dodo-printer", + "dotenv", "effect", "either", + "encoding", "enums", "exceptions", "exists", "exitcodes", "fetch", + "filterable", "fixed-points", "foldable-traversable", "foreign", @@ -362,7 +378,9 @@ "functors", "gen", "graphs", + "heterogeneous", "http-methods", + "httpurple", "identity", "integers", "invariant", @@ -370,27 +388,39 @@ "js-fetch", "js-promise", "js-promise-aff", + "js-timers", "js-uri", "json", + "justifill", "language-cst-parser", "lazy", "lcg", "lists", + "literals", "maybe", "media-types", "mmorph", "newtype", "node-buffer", + "node-child-process", "node-event-emitter", + "node-execa", "node-fs", + "node-http", + "node-human-signals", + "node-net", + "node-os", "node-path", "node-process", "node-streams", + "node-tls", + "node-url", "nonempty", "now", "nullable", "numbers", "open-memoize", + "options", "optparse", "ordered-collections", "orders", @@ -402,19 +432,26 @@ "prelude", "profunctor", "profunctor-lenses", + "psci-support", "quickcheck", + "quickcheck-laws", "random", "record", + "record-studio", "refs", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", "routing-duplex", + "run", "safe-coerce", "spec", "spec-node", "st", "strings", "tailrec", + "these", "transformers", "tuples", "type-equality", @@ -422,6 +459,9 @@ "unfoldable", "unicode", "unsafe-coerce", + "unsafe-reference", + "untagged-union", + "uuidv4", "variant", "web-dom", "web-events", diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs index 960484609..9d8b6b0b4 100644 --- a/test-utils/src/Registry/Test/E2E/Client.purs +++ b/test-utils/src/Registry/Test/E2E/Client.purs @@ -74,14 +74,14 @@ configFromEnv = do -- | Errors that can occur during client operations data ClientError = HttpError { status :: Int, body :: String } - | 
ParseError String + | ParseError { msg :: String, raw :: String } | Timeout String | NetworkError String printClientError :: ClientError -> String printClientError = case _ of HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body - ParseError msg -> "Parse Error: " <> msg + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw Timeout msg -> "Timeout: " <> msg NetworkError msg -> "Network Error: " <> msg @@ -102,7 +102,7 @@ get codec config path = runExceptT do body <- lift response.text if response.status >= 200 && response.status < 300 then case parseResponse codec body of - Left err -> throwError $ ParseError err + Left err -> throwError $ ParseError { msg: err, raw: body } Right a -> pure a else throwError $ HttpError { status: response.status, body } @@ -119,7 +119,7 @@ post reqCodec resCodec config path reqBody = runExceptT do responseBody <- lift response.text if response.status >= 200 && response.status < 300 then case parseResponse resCodec responseBody of - Left err -> throwError $ ParseError err + Left err -> throwError $ ParseError { msg: err, raw: responseBody } Right a -> pure a else throwError $ HttpError { status: response.status, body: responseBody } @@ -175,6 +175,6 @@ pollJob config jobId = go 1 case result of Left err -> throwError $ toError err Right job -> - case job.finishedAt of + case (V1.jobInfo job).finishedAt of Just _ -> pure job Nothing -> go (attempt + 1) diff --git a/test-utils/src/Registry/Test/E2E/Fixtures.purs b/test-utils/src/Registry/Test/E2E/Fixtures.purs new file mode 100644 index 000000000..70f1242b0 --- /dev/null +++ b/test-utils/src/Registry/Test/E2E/Fixtures.purs @@ -0,0 +1,76 @@ +-- | Test fixtures for E2E tests. +-- | Contains package operation data used across multiple test suites. 
+module Registry.Test.E2E.Fixtures + ( effectPublishData + , failingTransferData + , trusteeAuthenticatedData + ) where + +import Prelude + +import Data.Codec.JSON as CJ +import Data.Maybe (Maybe(..)) +import JSON as JSON +import Registry.Location as Location +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), TransferData, UnpublishData) +import Registry.Operation as Operation +import Registry.SSH (Signature(..)) +import Registry.Test.Utils as Utils + +-- | Standard publish data for effect@4.0.0, used by E2E tests. +-- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 +effectPublishData :: Operation.PublishData +effectPublishData = + { name: Utils.unsafePackageName "effect" + , location: Just $ Location.GitHub + { owner: "purescript" + , repo: "purescript-effect" + , subdir: Nothing + } + , ref: "v4.0.0" + , compiler: Utils.unsafeVersion "0.15.9" + , resolutions: Nothing + , version: Utils.unsafeVersion "4.0.0" + } + +-- | Authenticated transfer data for prelude, which has no owners in fixtures. +-- | Used to test failure scenarios in E2E tests - will fail because no owners +-- | are listed to verify the signature against. +failingTransferData :: AuthenticatedData +failingTransferData = + let + transferPayload :: TransferData + transferPayload = + { name: Utils.unsafePackageName "prelude" + , newLocation: Location.GitHub + { owner: "someone-else" + , repo: "purescript-prelude" + , subdir: Nothing + } + } + rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferPayload + in + { payload: Transfer transferPayload + , rawPayload + , signature: Signature "invalid-signature-for-testing" + } + +-- | Authenticated data with an intentionally invalid signature. +-- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. +-- | If re-signing works, the job succeeds; if not, signature verification fails. 
+-- | Uses prelude@6.0.1 which exists in app/fixtures/registry/metadata/prelude.json. +trusteeAuthenticatedData :: AuthenticatedData +trusteeAuthenticatedData = + let + unpublishPayload :: UnpublishData + unpublishPayload = + { name: Utils.unsafePackageName "prelude" + , version: Utils.unsafeVersion "6.0.1" + , reason: "Testing trustee re-signing" + } + rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishPayload + in + { payload: Unpublish unpublishPayload + , rawPayload + , signature: Signature "invalid-signature-for-testing" + } diff --git a/test-utils/src/Registry/Test/E2E/WireMock.purs b/test-utils/src/Registry/Test/E2E/WireMock.purs new file mode 100644 index 000000000..6895d9e44 --- /dev/null +++ b/test-utils/src/Registry/Test/E2E/WireMock.purs @@ -0,0 +1,164 @@ +-- | WireMock admin API client for verifying HTTP requests in E2E tests. +-- | +-- | This module provides helpers to query WireMock's request journal, allowing +-- | tests to assert on what HTTP requests were made to mock services. +module Registry.Test.E2E.WireMock + ( WireMockConfig + , WireMockRequest + , WireMockError(..) 
+ , configFromEnv + , getRequests + , getRequestsOrFail + , clearRequests + , clearRequestsOrFail + , filterByMethod + , filterByUrlContaining + , printWireMockError + , formatRequests + , failWithRequests + ) where + +import Prelude + +import Control.Monad.Error.Class (class MonadThrow, throwError) +import Control.Monad.Except (runExceptT) +import Control.Monad.Trans.Class (lift) +import Data.Array as Array +import Data.Bifunctor (lmap) +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.Either (Either(..)) +import Data.Int as Int +import Data.Maybe (Maybe(..)) +import Data.String as String +import Effect (Effect) +import Effect.Aff (Aff) +import Effect.Aff as Aff +import Effect.Exception as Effect.Exception +import Fetch (Method(..)) +import Fetch as Fetch +import Effect.Exception (Error) +import JSON as JSON +import Node.Process as Process +import Codec.JSON.DecodeError as CJ.DecodeError + +-- | Configuration for connecting to WireMock admin API +type WireMockConfig = + { baseUrl :: String + } + +-- | A recorded request from WireMock's journal +type WireMockRequest = + { method :: String + , url :: String + , body :: Maybe String + } + +-- | Error type for WireMock operations +data WireMockError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + +printWireMockError :: WireMockError -> String +printWireMockError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + +-- | Create config from GITHUB_API_URL environment variable. +-- | Convenience for tests that need to inspect GitHub mock requests. +-- | Each WireMock instance has its own admin API on the same port. 
+configFromEnv :: Effect WireMockConfig +configFromEnv = do + maybeUrl <- Process.lookupEnv "GITHUB_API_URL" + case maybeUrl of + Nothing -> Effect.Exception.throw "GITHUB_API_URL environment variable is not set." + Just baseUrl -> pure { baseUrl } + +-- | Codec for a single request entry in WireMock's response +requestCodec :: CJ.Codec WireMockRequest +requestCodec = CJ.named "WireMockRequest" $ CJ.Record.object + { method: CJ.string + , url: CJ.string + , body: CJ.Record.optional CJ.string + } + +-- | Codec for the nested request object in WireMock's journal response +journalEntryCodec :: CJ.Codec { request :: WireMockRequest } +journalEntryCodec = CJ.named "JournalEntry" $ CJ.Record.object + { request: requestCodec + } + +-- | Codec for the full journal response +journalCodec :: CJ.Codec { requests :: Array { request :: WireMockRequest } } +journalCodec = CJ.named "Journal" $ CJ.Record.object + { requests: CJ.array journalEntryCodec + } + +-- | Parse JSON response body using a codec +parseResponse :: forall a. 
CJ.Codec a -> String -> Either String a +parseResponse codec body = do + json <- lmap (append "JSON parse error: ") $ JSON.parse body + lmap CJ.DecodeError.print $ CJ.decode codec json + +-- | Get all recorded requests from WireMock's journal +getRequests :: WireMockConfig -> Aff (Either WireMockError (Array WireMockRequest)) +getRequests config = runExceptT do + response <- lift $ Fetch.fetch (config.baseUrl <> "/__admin/requests") { method: GET } + body <- lift response.text + if response.status == 200 then + case parseResponse journalCodec body of + Left err -> throwError $ ParseError { msg: err, raw: body } + Right journal -> pure $ map _.request journal.requests + else + throwError $ HttpError { status: response.status, body } + +-- | Clear all recorded requests from WireMock's journal +clearRequests :: WireMockConfig -> Aff (Either WireMockError Unit) +clearRequests config = runExceptT do + response <- lift $ Fetch.fetch (config.baseUrl <> "/__admin/requests") { method: DELETE } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Get requests, throwing on error. Useful in tests where failure should abort. +getRequestsOrFail :: WireMockConfig -> Aff (Array WireMockRequest) +getRequestsOrFail config = do + result <- getRequests config + case result of + Left err -> + throwError $ Aff.error $ "Failed to get WireMock requests: " <> printWireMockError err + Right rs -> + pure rs + +-- | Clear requests, throwing on error. Useful in test setup. 
+clearRequestsOrFail :: WireMockConfig -> Aff Unit +clearRequestsOrFail config = do + result <- clearRequests config + case result of + Left err -> + Aff.throwError $ Aff.error $ "Failed to clear WireMock journal: " <> printWireMockError err + Right _ -> + pure unit + +-- | Filter requests by HTTP method +filterByMethod :: String -> Array WireMockRequest -> Array WireMockRequest +filterByMethod method = Array.filter (\r -> r.method == method) + +-- | Filter requests by URL substring +filterByUrlContaining :: String -> Array WireMockRequest -> Array WireMockRequest +filterByUrlContaining substring = Array.filter (\r -> String.contains (String.Pattern substring) r.url) + +-- | Format an array of requests for debugging output +formatRequests :: Array WireMockRequest -> String +formatRequests requests = String.joinWith "\n" $ map formatRequest requests + where + formatRequest r = r.method <> " " <> r.url <> case r.body of + Nothing -> "" + Just b -> "\n Body: " <> b + +-- | Fail a test with a message and debug info about captured requests. +failWithRequests :: forall m a. MonadThrow Error m => String -> Array WireMockRequest -> m a +failWithRequests msg requests = throwError $ Effect.Exception.error $ + msg <> "\n\nCaptured requests:\n" <> formatRequests requests