diff --git a/apps/server-nestjs/Dockerfile b/apps/server-nestjs/Dockerfile index 239c894696..392b54819b 100644 --- a/apps/server-nestjs/Dockerfile +++ b/apps/server-nestjs/Dockerfile @@ -51,6 +51,9 @@ FROM dev AS build RUN pnpm --filter @cpn-console/logger run build RUN pnpm --filter @cpn-console/shared run build +# Build hooks (génère dist/ et types/ nécessaires aux imports) +RUN pnpm --filter @cpn-console/hooks run build + # Réinjecter shared buildé dans node_modules (injectWorkspacePackages copie au moment de l'install) RUN pnpm --filter @cpn-console/server-nestjs install --frozen-lockfile diff --git a/apps/server-nestjs/package.json b/apps/server-nestjs/package.json index 139c91d6ea..c690249dc5 100644 --- a/apps/server-nestjs/package.json +++ b/apps/server-nestjs/package.json @@ -44,6 +44,7 @@ "@fastify/swagger": "^8.15.0", "@fastify/swagger-ui": "^4.2.0", "@gitbeaker/core": "^40.6.0", + "@gitbeaker/requester-utils": "^40.6.0", "@gitbeaker/rest": "^40.6.0", "@keycloak/keycloak-admin-client": "^24.0.0", "@kubernetes-models/argo-cd": "^2.7.2", @@ -80,6 +81,7 @@ "rxjs": "^7.8.2", "undici": "^7.24.0", "vitest-mock-extended": "^2.0.2", + "yaml": "^2.7.1", "zod": "^3.25.76" }, "devDependencies": { diff --git a/apps/server-nestjs/src/cpin-module/application-initialization/application-initialization-service/application-initialization.service.ts b/apps/server-nestjs/src/cpin-module/application-initialization/application-initialization-service/application-initialization.service.ts index 431519f2b5..9889e6e075 100644 --- a/apps/server-nestjs/src/cpin-module/application-initialization/application-initialization-service/application-initialization.service.ts +++ b/apps/server-nestjs/src/cpin-module/application-initialization/application-initialization-service/application-initialization.service.ts @@ -31,23 +31,27 @@ export class ApplicationInitializationService { } async injectDataInDatabase(path: string) { - this.logger.log('Starting init DB...') + this.logger.log(`Starting 
database initialization using data from ${path}`) const { data } = await import(path) await this.databaseInitializationService.initDb(data) - this.logger.log('initDb invoked successfully') + this.logger.log('Database initialization completed successfully') } async initApp() { try { await this.databaseService.getConnection() } catch (error) { - this.logger.error(error.message) + if (error instanceof Error) { + this.logger.error(`Database connection failed: ${error.message}`, error.stack) + } else { + this.logger.error(`Database connection failed: ${String(error)}`) + } throw error } this.pluginManagementService.initPm() - this.logger.log('Reading init database file') + this.logger.log('Loading database initialization data') try { const dataPath @@ -60,35 +64,36 @@ export class ApplicationInitializationService { this.configurationService.isProd && !this.configurationService.isDevSetup ) { - this.logger.log('Cleaning up imported data file...') + this.logger.log(`Cleaning up imported data module at ${dataPath}`) await rm(resolve(__dirname, dataPath)) - this.logger.log(`Successfully deleted '${dataPath}'`) + this.logger.log(`Deleted imported data module at ${dataPath}`) } } catch (error) { - if ( - error.code === 'ERR_MODULE_NOT_FOUND' - || error.message.includes('Failed to load') - || error.message.includes('Cannot find module') - ) { - this.logger.log('No initDb file, skipping') + if (error instanceof Error) { + const errno = error as NodeJS.ErrnoException + const errorCode = typeof errno.code === 'string' ? 
errno.code : undefined + if ( + errorCode === 'ERR_MODULE_NOT_FOUND' + || error.message.includes('Failed to load') + || error.message.includes('Cannot find module') + ) { + this.logger.log('No database initialization data module was found, so initialization was skipped') + } else { + this.logger.warn(`Database initialization failed: ${error.message}`) + throw error + } } else { - this.logger.warn(error.message) + this.logger.warn(`Database initialization failed: ${String(error)}`) throw error } } - this.logger.debug({ - isDev: this.configurationService.isDev, - isTest: this.configurationService.isTest, - isCI: this.configurationService.isCI, - isDevSetup: this.configurationService.isDevSetup, - isProd: this.configurationService.isProd, - }) + this.logger.debug(`Runtime environment flags: isDev=${this.configurationService.isDev} isTest=${this.configurationService.isTest} isCI=${this.configurationService.isCI} isDevSetup=${this.configurationService.isDevSetup} isProd=${this.configurationService.isProd}`) } async exitGracefully(error?: Error) { if (error instanceof Error) { - this.logger.fatal(error) + this.logger.fatal(`Exiting due to an unhandled error: ${error.message}`) } // @TODO Determine if it is necessary, or if we would rather plug ourselves // onto NestJS lifecycle, or even if all this is actually necessary @@ -96,18 +101,22 @@ export class ApplicationInitializationService { // // await app.close(); - this.logger.log('Closing connections...') + this.logger.log('Closing connections') await this.databaseService.closeConnections() - this.logger.log('Exiting...') + this.logger.log('Exiting') process.exit(error instanceof Error ? 
1 : 0) } logExitCode(code: number) { - this.logger.warn(`received signal: ${code}`) + this.logger.warn(`Process is exiting with code ${code}`) } - logUnhandledRejection(reason: unknown, promise: Promise) { - this.logger.error({ message: 'Unhandled Rejection', promise, reason }) + logUnhandledRejection(reason: unknown, _promise: Promise) { + if (reason instanceof Error) { + this.logger.error(`Unhandled Promise rejection: ${reason.message}`, reason.stack) + return + } + this.logger.error(`Unhandled Promise rejection: ${String(reason)}`) } handleExit() { diff --git a/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts b/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts index f049b37641..80753c6edb 100644 --- a/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts +++ b/apps/server-nestjs/src/cpin-module/infrastructure/configuration/configuration.service.ts @@ -36,10 +36,70 @@ export class ConfigurationService { = process.env.CONTACT_EMAIL ?? 'cloudpinative-relations@interieur.gouv.fr' + // argocd + argoNamespace = process.env.ARGO_NAMESPACE ?? 'argocd' + argocdUrl = process.env.ARGOCD_URL + argocdExtraRepositories = process.env.ARGOCD_EXTRA_REPOSITORIES + + // dso + dsoEnvChartVersion = process.env.DSO_ENV_CHART_VERSION ?? 'dso-env-1.6.0' + dsoNsChartVersion = process.env.DSO_NS_CHART_VERSION ?? 'dso-ns-1.1.5' + // plugins mockPlugins = process.env.MOCK_PLUGINS === 'true' projectRootDir = process.env.PROJECTS_ROOT_DIR pluginsDir = process.env.PLUGINS_DIR ?? '/plugins' + + // gitlab + gitlabToken = process.env.GITLAB_TOKEN + gitlabUrl = process.env.GITLAB_URL + gitlabInternalUrl = process.env.GITLAB_INTERNAL_URL + + gitlabMirrorTokenExpirationDays = Number(process.env.GITLAB_MIRROR_TOKEN_EXPIRATION_DAYS ?? 180) + gitlabMirrorTokenRotationThresholdDays = Number(process.env.GITLAB_MIRROR_TOKEN_ROTATION_THRESHOLD_DAYS ?? 
90) + + // vault + vaultToken = process.env.VAULT_TOKEN + vaultUrl = process.env.VAULT_URL + vaultInternalUrl = process.env.VAULT_INTERNAL_URL + + vaultKvName = process.env.VAULT_KV_NAME ?? 'forge-dso' + + // registry (harbor) + harborUrl = process.env.HARBOR_URL + harborInternalUrl = process.env.HARBOR_INTERNAL_URL + harborAdmin = process.env.HARBOR_ADMIN + harborAdminPassword = process.env.HARBOR_ADMIN_PASSWORD + harborRuleTemplate = process.env.HARBOR_RULE_TEMPLATE + harborRuleCount = process.env.HARBOR_RULE_COUNT + harborRetentionCron = process.env.HARBOR_RETENTION_CRON + + // nexus + nexusUrl = process.env.NEXUS_URL + nexusInternalUrl = process.env.NEXUS_INTERNAL_URL + nexusAdmin = process.env.NEXUS_ADMIN + nexusAdminPassword = process.env.NEXUS_ADMIN_PASSWORD + nexusSecretExposedUrl + = process.env.NEXUS__SECRET_EXPOSE_INTERNAL_URL === 'true' + ? process.env.NEXUS_INTERNAL_URL + : process.env.NEXUS_URL + + getInternalOrPublicGitlabUrl() { + return this.gitlabInternalUrl ?? this.gitlabUrl + } + + getInternalOrPublicVaultUrl() { + return this.vaultInternalUrl ?? this.vaultUrl + } + + getInternalOrPublicHarborUrl() { + return this.harborInternalUrl ?? this.harborUrl + } + + getInternalOrPublicNexusUrl() { + return this.nexusInternalUrl ?? this.nexusUrl + } + NODE_ENV = process.env.NODE_ENV === 'test' ? 
'test' diff --git a/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts b/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts new file mode 100644 index 0000000000..ffe9d6a939 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd-datastore.service.ts @@ -0,0 +1,63 @@ +import type { Prisma } from '@prisma/client' +import { Inject, Injectable } from '@nestjs/common' +import { PrismaService } from '../../cpin-module/infrastructure/database/prisma.service' + +export const projectSelect = { + id: true, + name: true, + slug: true, + plugins: { + select: { + pluginName: true, + key: true, + value: true, + }, + }, + repositories: { + select: { + id: true, + internalRepoName: true, + isInfra: true, + helmValuesFiles: true, + deployRevision: true, + deployPath: true, + }, + }, + environments: { + select: { + id: true, + name: true, + clusterId: true, + cpu: true, + gpu: true, + memory: true, + autosync: true, + }, + }, + clusters: { + select: { + id: true, + label: true, + zone: { + select: { + slug: true, + }, + }, + }, + }, +} satisfies Prisma.ProjectSelect + +export type ProjectWithDetails = Prisma.ProjectGetPayload<{ + select: typeof projectSelect +}> + +@Injectable() +export class ArgoCDDatastoreService { + constructor(@Inject(PrismaService) private readonly prisma: PrismaService) {} + + async getAllProjects(): Promise { + return this.prisma.project.findMany({ + select: projectSelect, + }) + } +} diff --git a/apps/server-nestjs/src/modules/argocd/argocd-health.service.ts b/apps/server-nestjs/src/modules/argocd/argocd-health.service.ts new file mode 100644 index 0000000000..f5959c3dd3 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd-health.service.ts @@ -0,0 +1,25 @@ +import { Inject, Injectable } from '@nestjs/common' +import { HealthIndicatorService } from '@nestjs/terminus' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' + +@Injectable() +export class 
ArgoCDHealthService { + constructor( + @Inject(ConfigurationService) private readonly config: ConfigurationService, + @Inject(HealthIndicatorService) private readonly healthIndicator: HealthIndicatorService, + ) {} + + async check(key: string) { + const indicator = this.healthIndicator.check(key) + if (!this.config.argocdUrl) return indicator.down('Not configured') + + const url = new URL('/api/version', this.config.argocdUrl).toString() + try { + const response = await fetch(url) + if (response.status < 500) return indicator.up({ httpStatus: response.status }) + return indicator.down({ httpStatus: response.status }) + } catch (error) { + return indicator.down(error instanceof Error ? error.message : String(error)) + } + } +} diff --git a/apps/server-nestjs/src/modules/argocd/argocd.module.ts b/apps/server-nestjs/src/modules/argocd/argocd.module.ts new file mode 100644 index 0000000000..b554561f94 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd.module.ts @@ -0,0 +1,16 @@ +import { Module } from '@nestjs/common' +import { HealthIndicatorService } from '@nestjs/terminus' +import { ConfigurationModule } from '../../cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } from '../../cpin-module/infrastructure/infrastructure.module' +import { GitlabModule } from '../gitlab/gitlab.module' +import { VaultModule } from '../vault/vault.module' +import { ArgoCDDatastoreService } from './argocd-datastore.service' +import { ArgoCDHealthService } from './argocd-health.service' +import { ArgoCDService } from './argocd.service' + +@Module({ + imports: [ConfigurationModule, InfrastructureModule, GitlabModule, VaultModule], + providers: [HealthIndicatorService, ArgoCDHealthService, ArgoCDService, ArgoCDDatastoreService], + exports: [ArgoCDHealthService], +}) +export class ArgoCDModule {} diff --git a/apps/server-nestjs/src/modules/argocd/argocd.service.spec.ts b/apps/server-nestjs/src/modules/argocd/argocd.service.spec.ts new 
file mode 100644 index 0000000000..413ccd8b8a --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd.service.spec.ts @@ -0,0 +1,294 @@ +import type { TestingModule } from '@nestjs/testing' +import type { Mocked } from 'vitest' +import type { ProjectWithDetails } from './argocd-datastore.service' +import { generateNamespaceName } from '@cpn-console/shared' +import { Test } from '@nestjs/testing' +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { stringify } from 'yaml' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { GitlabClientService } from '../gitlab/gitlab-client.service' +import { makeProjectSchema } from '../gitlab/gitlab-testing.utils' +import { VaultClientService } from '../vault/vault-client.service' +import { ArgoCDDatastoreService } from './argocd-datastore.service' +import { ArgoCDService } from './argocd.service' + +function createArgoCDControllerServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + ArgoCDService, + { + provide: ArgoCDDatastoreService, + useValue: { + getAllProjects: vi.fn(), + } satisfies Partial, + }, + { + provide: ConfigurationService, + useValue: { + argoNamespace: 'argocd', + argocdUrl: 'https://argocd.internal', + argocdExtraRepositories: 'repo3', + dsoEnvChartVersion: 'dso-env-1.6.0', + dsoNsChartVersion: 'dso-ns-1.1.5', + } satisfies Partial, + }, + { + provide: GitlabClientService, + useValue: { + getOrCreateInfraGroupRepo: vi.fn(), + getOrCreateProjectGroupPublicUrl: vi.fn(), + getOrCreateInfraGroupRepoPublicUrl: vi.fn(), + generateCreateOrUpdateAction: vi.fn(), + maybeCreateCommit: vi.fn(), + listFiles: vi.fn(), + } satisfies Partial, + }, + { + provide: VaultClientService, + useValue: { + readProjectValues: vi.fn(), + } satisfies Partial, + }, + ], + }) +} + +describe('argoCDService', () => { + let service: ArgoCDService + let datastore: Mocked + let gitlab: Mocked + let vault: Mocked + + 
beforeEach(async () => { + vi.clearAllMocks() + const module: TestingModule = await createArgoCDControllerServiceTestingModule().compile() + service = module.get(ArgoCDService) + datastore = module.get(ArgoCDDatastoreService) + gitlab = module.get(GitlabClientService) + vault = module.get(VaultClientService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + it('should sync project environments', async () => { + const mockProject = { + id: '123e4567-e89b-12d3-a456-426614174000', + slug: 'project-1', + name: 'Project 1', + environments: [ + { id: '123e4567-e89b-12d3-a456-426614174001', name: 'dev', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }, + { id: '123e4567-e89b-12d3-a456-426614174002', name: 'prod', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }, + ], + clusters: [ + { id: 'c1', label: 'cluster-1', zone: { slug: 'zone-1' } }, + ], + repositories: [ + { + id: 'repo-1', + internalRepoName: 'infra-repo', + isInfra: true, + deployRevision: 'HEAD', + deployPath: '.', + helmValuesFiles: '', + }, + ], + plugins: [{ pluginName: 'argocd', key: 'extraRepositories', value: 'repo2' }], + } satisfies ProjectWithDetails + + const infraProject = makeProjectSchema({ id: 100, http_url_to_repo: 'https://gitlab.internal/infra' }) + datastore.getAllProjects.mockResolvedValue([mockProject]) + gitlab.getOrCreateInfraGroupRepo.mockResolvedValue(infraProject) + gitlab.getOrCreateProjectGroupPublicUrl.mockResolvedValue('https://gitlab.internal/group') + gitlab.getOrCreateInfraGroupRepoPublicUrl.mockResolvedValue('https://gitlab.internal/infra-repo') + gitlab.listFiles.mockResolvedValue([]) + vault.readProjectValues.mockResolvedValue({ secret: 'value' }) + gitlab.generateCreateOrUpdateAction.mockImplementation(async (_repoId, _ref, filePath: string, content: string) => { + return { action: 'create', filePath, content } as any + }) + + await expect(service.handleCron()).resolves.not.toThrow() + + // Verify Gitlab calls + 
expect(gitlab.maybeCreateCommit).toHaveBeenCalledTimes(1) + expect(gitlab.maybeCreateCommit).toHaveBeenCalledWith( + infraProject, + 'ci: :robot_face: Sync project-1', + expect.arrayContaining([ + { + action: 'create', + content: stringify({ + common: { + 'dso/project': 'Project 1', + 'dso/project.id': '123e4567-e89b-12d3-a456-426614174000', + 'dso/project.slug': 'project-1', + 'dso/environment': 'dev', + 'dso/environment.id': '123e4567-e89b-12d3-a456-426614174001', + }, + argocd: { + cluster: 'in-cluster', + namespace: 'argocd', + project: 'project-1-dev-6293', + envChartVersion: 'dso-env-1.6.0', + nsChartVersion: 'dso-ns-1.1.5', + }, + environment: { + valueFileRepository: 'https://gitlab.internal/infra', + valueFileRevision: 'HEAD', + valueFilePath: 'Project 1/cluster-1/dev/values.yaml', + roGroup: '/project-project-1/console/dev/RO', + rwGroup: '/project-project-1/console/dev/RW', + }, + application: { + quota: { + cpu: 1, + gpu: 0, + memory: '1Gi', + }, + sourceRepositories: [ + 'https://gitlab.internal/group/**', + 'repo3', + 'repo2', + ], + destination: { + namespace: generateNamespaceName(mockProject.id, mockProject.environments[0].id), + name: 'cluster-1', + }, + autosync: true, + vault: { secret: 'value' }, + repositories: [ + { + repoURL: 'https://gitlab.internal/infra-repo', + targetRevision: 'HEAD', + path: '.', + valueFiles: [], + }, + ], + }, + }), + filePath: 'Project 1/cluster-1/dev/values.yaml', + }, + { + action: 'create', + content: stringify({ + common: { + 'dso/project': 'Project 1', + 'dso/project.id': '123e4567-e89b-12d3-a456-426614174000', + 'dso/project.slug': 'project-1', + 'dso/environment': 'prod', + 'dso/environment.id': '123e4567-e89b-12d3-a456-426614174002', + }, + argocd: { + cluster: 'in-cluster', + namespace: 'argocd', + project: 'project-1-prod-c626', + envChartVersion: 'dso-env-1.6.0', + nsChartVersion: 'dso-ns-1.1.5', + }, + environment: { + valueFileRepository: 'https://gitlab.internal/infra', + valueFileRevision: 'HEAD', + 
valueFilePath: 'Project 1/cluster-1/prod/values.yaml', + roGroup: '/project-project-1/console/prod/RO', + rwGroup: '/project-project-1/console/prod/RW', + }, + application: { + quota: { + cpu: 1, + gpu: 0, + memory: '1Gi', + }, + sourceRepositories: [ + 'https://gitlab.internal/group/**', + 'repo3', + 'repo2', + ], + destination: { + namespace: generateNamespaceName(mockProject.id, mockProject.environments[1].id), + name: 'cluster-1', + }, + autosync: true, + vault: { secret: 'value' }, + repositories: [ + { + repoURL: 'https://gitlab.internal/infra-repo', + targetRevision: 'HEAD', + path: '.', + valueFiles: [], + }, + ], + }, + }), + filePath: 'Project 1/cluster-1/prod/values.yaml', + }, + ]), + ) + + expect(gitlab.listFiles).toHaveBeenCalledWith(infraProject, { + path: 'Project 1/', + recursive: true, + }) + + expect(gitlab.generateCreateOrUpdateAction).toHaveBeenCalledTimes(2) + }) + + it('should delete values file when an environment is removed', async () => { + const mockProject = { + id: '123e4567-e89b-12d3-a456-426614174000', + slug: 'project-1', + name: 'Project 1', + environments: [ + { id: '123e4567-e89b-12d3-a456-426614174001', name: 'dev', clusterId: 'c1', cpu: 1, gpu: 0, memory: 1, autosync: true }, + ], + clusters: [ + { id: 'c1', label: 'cluster-1', zone: { slug: 'zone-1' } }, + ], + repositories: [ + { + id: 'repo-1', + internalRepoName: 'infra-repo', + isInfra: true, + deployRevision: 'HEAD', + deployPath: '.', + helmValuesFiles: '', + }, + ], + plugins: [], + } satisfies ProjectWithDetails + + const infraProject = makeProjectSchema({ id: 100, http_url_to_repo: 'https://gitlab.internal/infra' }) + datastore.getAllProjects.mockResolvedValue([mockProject]) + gitlab.getOrCreateInfraGroupRepo.mockResolvedValue(infraProject) + gitlab.getOrCreateProjectGroupPublicUrl.mockResolvedValue('https://gitlab.internal/group') + gitlab.getOrCreateInfraGroupRepoPublicUrl.mockResolvedValue('https://gitlab.internal/infra-repo') + gitlab.listFiles.mockResolvedValue([ 
+ { name: 'values.yaml', path: 'Project 1/cluster-1/dev/values.yaml' }, + { name: 'values.yaml', path: 'Project 1/cluster-1/prod/values.yaml' }, + ] as any) + vault.readProjectValues.mockResolvedValue({ secret: 'value' }) + gitlab.generateCreateOrUpdateAction.mockImplementation(async (_repoId, _ref, filePath: string, content: string) => { + return { action: 'create', filePath, content } as any + }) + + await expect(service.handleCron()).resolves.not.toThrow() + + expect(gitlab.maybeCreateCommit).toHaveBeenCalledTimes(1) + expect(gitlab.maybeCreateCommit).toHaveBeenCalledWith( + infraProject, + 'ci: :robot_face: Sync project-1', + expect.arrayContaining([ + expect.objectContaining({ + action: 'create', + filePath: 'Project 1/cluster-1/dev/values.yaml', + }), + { + action: 'delete', + filePath: 'Project 1/cluster-1/prod/values.yaml', + }, + ]), + ) + + expect(gitlab.generateCreateOrUpdateAction).toHaveBeenCalledTimes(1) + }) +}) diff --git a/apps/server-nestjs/src/modules/argocd/argocd.service.ts b/apps/server-nestjs/src/modules/argocd/argocd.service.ts new file mode 100644 index 0000000000..00cb7505f4 --- /dev/null +++ b/apps/server-nestjs/src/modules/argocd/argocd.service.ts @@ -0,0 +1,474 @@ +import type { CommitAction, CondensedProjectSchema, SimpleProjectSchema } from '@gitbeaker/core' +import type { ProjectWithDetails } from './argocd-datastore.service' +import { createHmac } from 'node:crypto' +import { generateNamespaceName, inClusterLabel } from '@cpn-console/shared' +import { Inject, Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import { trace } from '@opentelemetry/api' +import { stringify } from 'yaml' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { StartActiveSpan } from '../../cpin-module/infrastructure/telemetry/telemetry.decorator' +import { GitlabClientService } from 
'../gitlab/gitlab-client.service' +import { VaultClientService } from '../vault/vault-client.service' +import { ArgoCDDatastoreService } from './argocd-datastore.service' + +@Injectable() +export class ArgoCDService { + private readonly logger = new Logger(ArgoCDService.name) + + constructor( + @Inject(ArgoCDDatastoreService) private readonly argoCDDatastore: ArgoCDDatastoreService, + @Inject(ConfigurationService) private readonly config: ConfigurationService, + @Inject(GitlabClientService) private readonly gitlab: GitlabClientService, + @Inject(VaultClientService) private readonly vault: VaultClientService, + ) { + this.logger.log('ArgoCDService initialized') + } + + @OnEvent('project.upsert') + @StartActiveSpan() + async handleUpsert(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.log(`Handling a project upsert event for ${project.slug}`) + await this.ensureProject(project) + this.logger.log(`ArgoCD sync completed for project ${project.slug}`) + } + + @OnEvent('project.delete') + @StartActiveSpan() + async handleDelete(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.log(`Handling a project delete event for ${project.slug}`) + await this.ensureProject(project) + this.logger.log(`ArgoCD sync completed for project ${project.slug}`) + } + + @Cron(CronExpression.EVERY_HOUR) + @StartActiveSpan() + async handleCron() { + this.logger.log('Starting ArgoCD reconciliation') + const projects = await this.argoCDDatastore.getAllProjects() + const span = trace.getActiveSpan() + span?.setAttribute('argocd.projects.count', projects.length) + this.logger.log(`Loaded ${projects.length} projects for ArgoCD reconciliation`) + await this.ensureProjects(projects) + this.logger.log(`ArgoCD reconciliation completed (${projects.length})`) + } + + @StartActiveSpan() + private async ensureProjects(projects: 
ProjectWithDetails[]) { + const span = trace.getActiveSpan() + span?.setAttribute('argocd.projects.count', projects.length) + this.logger.verbose(`Reconciling ArgoCD projects (count=${projects.length})`) + await Promise.all(projects.map(project => this.ensureProject(project))) + } + + @StartActiveSpan() + private async ensureProject(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.verbose(`Reconciling ArgoCD project ${project.slug}`) + await this.ensureZones(project) + this.logger.verbose(`ArgoCD project reconciled (${project.slug})`) + } + + @StartActiveSpan() + private async ensureZones( + project: ProjectWithDetails, + ): Promise { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + const zones = getDistinctZones(project) + span?.setAttribute('argocd.zones.count', zones.length) + this.logger.verbose(`Reconciling ArgoCD zones for project ${project.slug} (count=${zones.length})`) + await Promise.all(zones.map(zoneSlug => this.ensureZone(project, zoneSlug))) + } + + @StartActiveSpan() + private async ensureZone( + project: ProjectWithDetails, + zoneSlug: string, + ): Promise { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + const infraProject = await this.gitlab.getOrCreateInfraGroupRepo(zoneSlug) + span?.setAttributes({ + 'argocd.repo.id': infraProject.id, + 'argocd.repo.path': infraProject.path_with_namespace, + 'zone.slug': zoneSlug, + }) + this.logger.verbose(`Reconciling ArgoCD zone for project ${project.slug} in zone ${zoneSlug} (repoId=${infraProject.id})`) + + const environmentActions = await this.generateEnvironmentsUpdateActions( + project, + project.environments, + infraProject, + zoneSlug, + ) + const purgeEnvironmentActions = await this.generatePurgeEnvironmentActions( + project, + infraProject, + zoneSlug, + ) + const actions: CommitAction[] = [ + ...environmentActions, + 
...purgeEnvironmentActions, + ] + + span?.setAttribute('argocd.repo.actions.count', actions.length) + if (actions.length === 0) { + this.logger.verbose(`No ArgoCD changes need to be committed for project ${project.slug} in zone ${zoneSlug}`) + } else { + this.logger.log(`Applying ArgoCD changes for project ${project.slug} in zone ${zoneSlug} (actions=${actions.length})`) + } + await this.gitlab.maybeCreateCommit(infraProject, `ci: :robot_face: Sync ${project.slug}`, actions) + } + + private async generatePurgeEnvironmentActions( + project: ProjectWithDetails, + infraProject: CondensedProjectSchema, + zoneSlug: string, + ): Promise { + const neededFiles = new Set() + const clusterLabelsInZone = new Set( + project.clusters + .filter(c => c.zone.slug === zoneSlug) + .map(c => c.label), + ) + + project.environments.forEach((env) => { + const cluster = project.clusters.find(c => c.id === env.clusterId) + if (cluster?.zone.slug !== zoneSlug) return + neededFiles.add(formatEnvironmentValuesFilePath(project, cluster, env)) + }) + + const existingFiles = await this.gitlab.listFiles(infraProject, { + path: `${project.name}/`, + recursive: true, + }) + + const projectPrefix = `${project.name}/` + const actions = existingFiles + .filter((existingFile) => { + if (existingFile.name !== 'values.yaml') return false + if (!existingFile.path.startsWith(projectPrefix)) return false + + const remaining = existingFile.path.slice(projectPrefix.length) + const clusterLabel = remaining.split('/')[0] + if (!clusterLabel || !clusterLabelsInZone.has(clusterLabel)) return false + + return !neededFiles.has(existingFile.path) + }) + .map(existingFile => ({ action: 'delete', filePath: existingFile.path } satisfies CommitAction)) + + this.logger.verbose(`Computed ArgoCD purge actions for project ${project.slug} in zone ${zoneSlug} (actions=${actions.length})`) + + return actions + } + + private async generateEnvironmentsUpdateActions( + project: ProjectWithDetails, + environments: 
ProjectWithDetails['environments'], + infraProject: SimpleProjectSchema, + zoneSlug: string, + ): Promise { + this.logger.verbose(`Computing ArgoCD environment actions for project ${project.slug} in zone ${zoneSlug} (environments=${environments.length})`) + const actions = await Promise.all( + environments + .filter((env) => { + const cluster = project.clusters.find(c => c.id === env.clusterId) + return cluster?.zone.slug === zoneSlug + }) + .map(env => this.generateEnvironmentUpdateAction(project, env, infraProject)), + ) + const filteredActions = actions.filter(a => !!a) as CommitAction[] + this.logger.verbose(`Computed ArgoCD environment actions for project ${project.slug} in zone ${zoneSlug} (actions=${filteredActions.length})`) + return filteredActions + } + + private async generateEnvironmentUpdateAction( + project: ProjectWithDetails, + environment: ProjectWithDetails['environments'][number], + infraProject: SimpleProjectSchema, + ): Promise { + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': project.slug, + 'environment.id': environment.id, + 'environment.name': environment.name, + }) + const vaultValues = await this.vault.readProjectValues(project.id) ?? 
{} + const cluster = project.clusters.find(c => c.id === environment.clusterId) + if (!cluster) { + this.logger.warn(`Cluster not found for environment ${environment.id} in project ${project.slug}`) + throw new Error(`Cluster not found for environment ${environment.id}`) + } + span?.setAttribute('zone.slug', cluster.zone.slug) + + const valueFilePath = formatEnvironmentValuesFilePath(project, cluster, environment) + + const repo = project.repositories.find(r => r.isInfra) + if (!repo) { + this.logger.warn(`Infrastructure repository not found for project ${project.slug} (projectId=${project.id})`) + throw new Error(`Infra repository not found for project ${project.id}`) + } + const repoUrl = await this.gitlab.getOrCreateInfraGroupRepoPublicUrl(repo.internalRepoName) + + const values = formatValues({ + project, + environment, + cluster, + gitlabPublicGroupUrl: await this.gitlab.getOrCreateProjectGroupPublicUrl(), + argocdExtraRepositories: this.config.argocdExtraRepositories, + infraProject, + valueFilePath, + repoUrl, + vaultValues, + argoNamespace: this.config.argoNamespace, + envChartVersion: this.config.dsoEnvChartVersion, + nsChartVersion: this.config.dsoNsChartVersion, + }) + + return this.gitlab.generateCreateOrUpdateAction( + infraProject, + 'main', + valueFilePath, + stringify(values), + ) + } +} + +interface ValuesSchema { + common: { + 'dso/project': string + 'dso/project.id': string + 'dso/project.slug': string + 'dso/environment': string + 'dso/environment.id': string + } + argocd: { + cluster: string + namespace: string + project: string + envChartVersion: string + nsChartVersion: string + } + environment: { + valueFileRepository: string + valueFileRevision: string + valueFilePath: string + roGroup: string + rwGroup: string + } + application: { + quota: { + cpu: number + gpu: number + memory: string + } + sourceRepositories: string[] + destination: { + namespace: string + name: string + } + autosync: boolean + vault: Record + repositories: { + repoURL: 
string + targetRevision: string + path: string + valueFiles: string[] + }[] + } +} + +function formatReadOnlyGroupName(projectSlug: string, environmentName: string) { + return `/project-${projectSlug}/console/${environmentName}/RO` +} + +function formatReadWriteGroupName(projectSlug: string, environmentName: string) { + return `/project-${projectSlug}/console/${environmentName}/RW` +} + +function formatAppProjectName(projectSlug: string, env: string) { + const envHash = createHmac('sha256', '') + .update(env) + .digest('hex') + .slice(0, 4) + return `${projectSlug}-${env}-${envHash}` +} + +function formatEnvironmentValuesFilePath(project: { name: string }, cluster: { label: string }, env: { name: string }): string { + return `${project.name}/${cluster.label}/${env.name}/values.yaml` +} + +function getDistinctZones(project: ProjectWithDetails) { + const zones = new Set() + project.clusters.forEach(c => zones.add(c.zone.slug)) + return [...zones] +} + +function splitExtraRepositories(extraRepositories: string | undefined): string[] { + if (!extraRepositories) return [] + return extraRepositories.split(',').map(r => r.trim()).filter(r => r.length > 0) +} + +function formatRepositoriesValues( + repositories: ProjectWithDetails['repositories'], + repoUrl: string, + envName: string, +) { + return repositories + .filter(repo => repo.isInfra) + .map((repository) => { + const valueFiles = splitExtraRepositories(repository.helmValuesFiles?.replaceAll('', envName)) + return { + repoURL: repoUrl, + targetRevision: repository.deployRevision || 'HEAD', + path: repository.deployPath || '.', + valueFiles, + } satisfies ValuesSchema['application']['repositories'][number] + }) +} + +function formatEnvironmentValues( + infraProject: SimpleProjectSchema, + valueFilePath: string, + roGroup: string, + rwGroup: string, +) { + return { + valueFileRepository: infraProject.http_url_to_repo, + valueFileRevision: 'HEAD', + valueFilePath, + roGroup, + rwGroup, + } satisfies 
ValuesSchema['environment'] +} + +interface FormatSourceRepositoriesValuesOptions { + gitlabPublicGroupUrl: string + argocdExtraRepositories?: string + projectPlugins?: ProjectWithDetails['plugins'] +} + +function formatSourceRepositoriesValues( + { gitlabPublicGroupUrl, argocdExtraRepositories, projectPlugins }: FormatSourceRepositoriesValuesOptions, +): string[] { + let projectExtraRepositories = '' + if (projectPlugins) { + const argocdPlugin = projectPlugins.find(p => p.pluginName === 'argocd' && p.key === 'extraRepositories') + if (argocdPlugin) projectExtraRepositories = argocdPlugin.value + } + + return [ + `${gitlabPublicGroupUrl}/**`, + ...splitExtraRepositories(argocdExtraRepositories), + ...splitExtraRepositories(projectExtraRepositories), + ] +} + +interface FormatCommonOptions { + project: ProjectWithDetails + environment: ProjectWithDetails['environments'][number] +} + +function formatCommon({ project, environment }: FormatCommonOptions) { + return { + 'dso/project': project.name, + 'dso/project.id': project.id, + 'dso/project.slug': project.slug, + 'dso/environment': environment.name, + 'dso/environment.id': environment.id, + } satisfies ValuesSchema['common'] +} + +interface FormatArgoCDValuesOptions { + namespace: string + project: string + envChartVersion: string + nsChartVersion: string +} + +function formatArgoCDValues(options: FormatArgoCDValuesOptions) { + const { namespace, project, envChartVersion, nsChartVersion } = options + return { + cluster: inClusterLabel, + namespace, + project, + envChartVersion, + nsChartVersion, + } satisfies ValuesSchema['argocd'] +} + +interface FormatValuesOptions { + project: ProjectWithDetails + environment: ProjectWithDetails['environments'][number] + cluster: ProjectWithDetails['clusters'][number] + gitlabPublicGroupUrl: string + argocdExtraRepositories?: string + vaultValues: Record + infraProject: SimpleProjectSchema + valueFilePath: string + repoUrl: string + argoNamespace: string + envChartVersion: 
string + nsChartVersion: string +} + +function formatValues({ + project, + environment, + cluster, + gitlabPublicGroupUrl, + argocdExtraRepositories, + vaultValues, + infraProject, + valueFilePath, + repoUrl, + argoNamespace, + envChartVersion, + nsChartVersion, +}: FormatValuesOptions) { + return { + common: formatCommon({ project, environment }), + argocd: formatArgoCDValues({ + namespace: argoNamespace, + project: formatAppProjectName(project.slug, environment.name), + envChartVersion, + nsChartVersion, + }), + environment: formatEnvironmentValues( + infraProject, + valueFilePath, + formatReadOnlyGroupName(project.slug, environment.name), + formatReadWriteGroupName(project.slug, environment.name), + ), + application: { + quota: { + cpu: environment.cpu, + gpu: environment.gpu, + memory: `${environment.memory}Gi`, + }, + sourceRepositories: formatSourceRepositoriesValues({ + gitlabPublicGroupUrl, + argocdExtraRepositories, + projectPlugins: project.plugins, + }), + destination: { + namespace: generateNamespaceName(project.id, environment.id), + name: cluster.label, + }, + autosync: environment.autosync, + vault: vaultValues, + repositories: formatRepositoriesValues( + project.repositories, + repoUrl, + environment.name, + ), + }, + } satisfies ValuesSchema +} diff --git a/apps/server-nestjs/src/modules/gitlab/files/.gitlab-ci.yml b/apps/server-nestjs/src/modules/gitlab/files/.gitlab-ci.yml new file mode 100644 index 0000000000..ca9be29847 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/files/.gitlab-ci.yml @@ -0,0 +1,22 @@ +variables: + PROJECT_NAME: + description: Nom du dépôt (dans ce Gitlab) à synchroniser. + GIT_BRANCH_DEPLOY: + description: Nom de la branche à synchroniser. + value: main + SYNC_ALL: + description: Synchroniser toutes les branches. 
+ value: "false" + +include: + - project: $CATALOG_PATH + file: mirror.yml + ref: main + +repo_pull_sync: + extends: .repo_pull_sync + only: + - api + - triggers + - web + - schedules diff --git a/apps/server-nestjs/src/modules/gitlab/files/mirror.sh b/apps/server-nestjs/src/modules/gitlab/files/mirror.sh new file mode 100644 index 0000000000..c50c923f8f --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/files/mirror.sh @@ -0,0 +1,83 @@ +#!/bin/bash + +set -e + +# Colorize terminal +red='\\e[0;31m' +no_color='\\033[0m' + +# Console step increment +i=1 + +# Default values +BRANCH_TO_SYNC=main + +print_help() { + TEXT_HELPER="\\nThis script aims to send a synchronization request to DSO.\\nFollowing flags are available: + -a Api url to send the synchronization request + -b Branch which is wanted to be synchronize for the given repository (default '$BRANCH_TO_SYNC') + -g GitLab token to trigger the pipeline on the gitlab mirror project + -i Gitlab mirror project id + -r Gitlab repository name to mirror + -h Print script help\\n" + printf "$TEXT_HELPER" +} + +print_args() { + printf "\\nArguments received: + -a API_URL: $API_URL + -b BRANCH_TO_SYNC: $BRANCH_TO_SYNC + -g GITLAB_TRIGGER_TOKEN length: \${#GITLAB_TRIGGER_TOKEN} + -i GITLAB_MIRROR_PROJECT_ID: $GITLAB_MIRROR_PROJECT_ID + -r REPOSITORY_NAME: $REPOSITORY_NAME\\n" +} + +# Parse options +while getopts :ha:b:g:i:r: flag +do + case "\${flag}" in + a) + API_URL=\${OPTARG};; + b) + BRANCH_TO_SYNC=\${OPTARG};; + g) + GITLAB_TRIGGER_TOKEN=\${OPTARG};; + i) + GITLAB_MIRROR_PROJECT_ID=\${OPTARG};; + r) + REPOSITORY_NAME=\${OPTARG};; + h) + printf "\\nHelp requested.\\n" + print_help + printf "\\nExiting.\\n" + exit 0;; + *) + printf "\\nInvalid argument \${OPTARG} (\${flag}).\\n" + print_help + print_args + exit 1;; + esac +done + +# Test if arguments are missing +if [ -z \${API_URL} ] || [ -z \${BRANCH_TO_SYNC} ] || [ -z \${GITLAB_TRIGGER_TOKEN} ] || [ -z \${GITLAB_MIRROR_PROJECT_ID} ] || [ -z \${REPOSITORY_NAME} 
]; then + printf "\\nArgument(s) missing!\\n" + print_help + print_args + exit 2 +fi + +# Print arguments +print_args + +# Send synchronization request +printf "\\n\${red}\${i}.\${no_color} Send request to DSO api.\\n\\n" + +curl \\ + -X POST \\ + --fail \\ + -F token=\${GITLAB_TRIGGER_TOKEN} \\ + -F ref=main \\ + -F variables[GIT_BRANCH_DEPLOY]=\${BRANCH_TO_SYNC} \\ + -F variables[PROJECT_NAME]=\${REPOSITORY_NAME} \\ + "\${API_URL}/api/v4/projects/\${GITLAB_MIRROR_PROJECT_ID}/trigger/pipeline" diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.spec.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.spec.ts new file mode 100644 index 0000000000..3f7564298b --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.spec.ts @@ -0,0 +1,600 @@ +import type { ExpandedGroupSchema, Gitlab as GitlabApi, ProjectSchema } from '@gitbeaker/core' +import type { TestingModule } from '@nestjs/testing' +import type { MockedFunction } from 'vitest' +import { Test } from '@nestjs/testing' +import { beforeEach, describe, expect, it } from 'vitest' +import { mockDeep, mockReset } from 'vitest-mock-extended' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { GITLAB_REST_CLIENT, GitlabClientService } from './gitlab-client.service' +import { + makeAccessTokenExposedSchema, + makeAccessTokenSchema, + makeExpandedGroupSchema, + makeExpandedUserSchema, + makeGitbeakerRequestError, + makeGroupSchema, + makeMemberSchema, + makeOffsetPagination, + makePipelineTriggerToken, + makeProjectSchema, + makeRepositoryFileExpandedSchema, + makeRepositoryTreeSchema, +} from './gitlab-testing.utils' +import { + INFRA_GROUP_CUSTOM_ATTRIBUTE_KEY, + MANAGED_BY_CONSOLE_CUSTOM_ATTRIBUTE_KEY, + PROJECT_GROUP_CUSTOM_ATTRIBUTE_KEY, + USER_ID_CUSTOM_ATTRIBUTE_KEY, +} from './gitlab.constants' + +const gitlabMock = mockDeep() + +function createGitlabClientServiceTestingModule() { + return 
Test.createTestingModule({ + providers: [ + GitlabClientService, + { + provide: GITLAB_REST_CLIENT, + useValue: gitlabMock, + }, + { + provide: ConfigurationService, + useValue: { + gitlabUrl: 'https://gitlab.internal', + gitlabToken: 'token', + gitlabInternalUrl: 'https://gitlab.internal', + projectRootDir: 'forge', + gitlabMirrorTokenExpirationDays: 30, + getInternalOrPublicGitlabUrl: () => 'https://gitlab.internal', + } satisfies Partial, + }, + ], + }) +} + +describe('gitlab-client', () => { + let service: GitlabClientService + + beforeEach(async () => { + mockReset(gitlabMock) + const module: TestingModule = await createGitlabClientServiceTestingModule().compile() + service = module.get(GitlabClientService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('getOrCreateInfraProject', () => { + it('should create infra project if not exists', async () => { + const zoneSlug = 'zone-1' + const rootId = 123 + const infraGroupId = 456 + const projectId = 789 + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + + gitlabMock.Groups.show.mockResolvedValueOnce({ id: rootId, full_path: 'forge' } as ExpandedGroupSchema) + + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + + gitlabMock.Groups.create.mockResolvedValue({ id: infraGroupId, full_path: 'forge/infra' } as ExpandedGroupSchema) + + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + + gitlabMock.Projects.create.mockResolvedValue({ + id: projectId, + path_with_namespace: 'forge/infra/zone-1', + http_url_to_repo: 'https://gitlab.internal/infra/zone-1.git', + } as ProjectSchema) + + const result = await service.getOrCreateInfraGroupRepo(zoneSlug) + + 
expect(result).toEqual({ + id: projectId, + http_url_to_repo: 'https://gitlab.internal/infra/zone-1.git', + path_with_namespace: 'forge/infra/zone-1', + }) + expect(gitlabMock.Groups.create).toHaveBeenCalledWith('infra', 'infra', expect.any(Object)) + expect(gitlabMock.GroupCustomAttributes.set).toHaveBeenCalledWith(infraGroupId, MANAGED_BY_CONSOLE_CUSTOM_ATTRIBUTE_KEY, 'true') + expect(gitlabMock.GroupCustomAttributes.set).toHaveBeenCalledWith(infraGroupId, INFRA_GROUP_CUSTOM_ATTRIBUTE_KEY, 'true') + expect(gitlabMock.Projects.create).toHaveBeenCalledWith(expect.objectContaining({ + name: zoneSlug, + path: zoneSlug, + namespaceId: infraGroupId, + })) + }) + }) + + describe('commitCreateOrUpdate', () => { + it('should create commit if file not exists', async () => { + const repoId = 1 + const repo = makeProjectSchema({ id: repoId }) + const content = 'content' + const filePath = 'file.txt' + const message = 'ci: :robot_face: Update file content' + + const gitlabRepositoryFilesShowMock = gitlabMock.RepositoryFiles.show as MockedFunction + const notFoundError = makeGitbeakerRequestError({ description: '404 File Not Found' }) + gitlabRepositoryFilesShowMock.mockRejectedValue(notFoundError) + + const action = await service.generateCreateOrUpdateAction(repo, 'main', filePath, content) + await service.maybeCreateCommit(repo, message, action ? 
[action] : []) + + expect(gitlabMock.Commits.create).toHaveBeenCalledWith( + repoId, + 'main', + message, + [{ action: 'create', filePath, content }], + ) + }) + + it('should update commit if content differs', async () => { + const repoId = 1 + const repo = makeProjectSchema({ id: repoId }) + const content = 'new content' + const filePath = 'file.txt' + const oldHash = 'oldhash' + const message = 'ci: :robot_face: Update file content' + + const gitlabRepositoryFilesShowMock = gitlabMock.RepositoryFiles.show as MockedFunction + gitlabRepositoryFilesShowMock.mockResolvedValue(makeRepositoryFileExpandedSchema({ content_sha256: oldHash })) + + const action = await service.generateCreateOrUpdateAction(repo, 'main', filePath, content) + await service.maybeCreateCommit(repo, message, action ? [action] : []) + + expect(gitlabMock.Commits.create).toHaveBeenCalledWith( + repoId, + 'main', + message, + [{ action: 'update', filePath, content }], + ) + }) + + it('should do nothing if content matches', async () => { + const repoId = 1 + const repo = makeProjectSchema({ id: repoId }) + const content = 'content' + const filePath = 'file.txt' + const hash = 'ed7002b439e9ac845f22357d822bac1444730fbdb6016d3ec9432297b9ec9f73' + const message = 'ci: :robot_face: Update file content' + + const gitlabRepositoryFilesShowMock = gitlabMock.RepositoryFiles.show as MockedFunction + gitlabRepositoryFilesShowMock.mockResolvedValue(makeRepositoryFileExpandedSchema({ content_sha256: hash })) + + const action = await service.generateCreateOrUpdateAction(repo, 'main', filePath, content) + await service.maybeCreateCommit(repo, message, action ? 
[action] : []) + + expect(gitlabMock.Commits.create).not.toHaveBeenCalled() + }) + }) + + describe('getOrCreateProjectGroup', () => { + it('should create project group if not exists', async () => { + const projectSlug = 'project-1' + const rootId = 123 + const groupId = 456 + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce(makeExpandedGroupSchema({ id: rootId, full_path: 'forge' })) + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.create.mockResolvedValue(makeExpandedGroupSchema({ id: groupId, name: projectSlug, path: projectSlug, full_path: `forge/${projectSlug}` })) + + const result = await service.getOrCreateProjectSubGroup(projectSlug) + + expect(result).toEqual(expect.objectContaining({ id: groupId, name: projectSlug })) + expect(gitlabMock.Groups.create).toHaveBeenCalledWith(projectSlug, projectSlug, expect.objectContaining({ + parentId: rootId, + })) + expect(gitlabMock.GroupCustomAttributes.set).toHaveBeenCalledWith(groupId, MANAGED_BY_CONSOLE_CUSTOM_ATTRIBUTE_KEY, 'true') + expect(gitlabMock.GroupCustomAttributes.set).toHaveBeenCalledWith(groupId, PROJECT_GROUP_CUSTOM_ATTRIBUTE_KEY, projectSlug) + }) + + it('should return existing group', async () => { + const projectSlug = 'project-1' + const rootId = 123 + const groupId = 456 + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce(makeExpandedGroupSchema({ id: rootId, full_path: 'forge' })) + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: groupId, name: projectSlug, parent_id: rootId, full_path: 'forge/project-1' }], + paginationInfo: { 
next: null }, + }) + + const result = await service.getOrCreateProjectSubGroup(projectSlug) + + expect(result).toEqual({ id: groupId, name: projectSlug, parent_id: rootId, full_path: 'forge/project-1' }) + expect(gitlabMock.Groups.create).not.toHaveBeenCalled() + }) + }) + + describe('repositories', () => { + it('should return internal repo url', async () => { + const projectSlug = 'project-1' + const repoName = 'repo-1' + const rootId = 123 + const groupId = 1 + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: groupId, full_path: 'forge/project-1' }], + paginationInfo: { next: null }, + }) + + const result = await service.getOrCreateProjectGroupInternalRepoUrl(projectSlug, repoName) + expect(result).toBe('https://gitlab.internal/forge/project-1/repo-1.git') + }) + + it('should upsert mirror repo', async () => { + const projectSlug = 'project-1' + const repoId = 1 + + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValue({ + data: [{ id: repoId, path_with_namespace: 'forge/project-1/mirror' }], + paginationInfo: { next: null }, + }) + + gitlabMock.Projects.edit.mockResolvedValue({ id: repoId, name: 'mirror' } as ProjectSchema) + + const result = await service.upsertProjectMirrorRepo(projectSlug) + + expect(result).toEqual({ id: repoId, name: 'mirror' }) + expect(gitlabMock.Projects.edit).toHaveBeenCalledWith(repoId, expect.objectContaining({ + name: 'mirror', + path: 'mirror', + })) + }) + + it('should set managed custom attribute when upserting a project repo', async () => { + const projectSlug = 'project-1' + const repoName = 'repo-1' + const repoId = 101 + + gitlabMock.Projects.show.mockResolvedValue(makeProjectSchema({ id: repoId })) + gitlabMock.Projects.edit.mockResolvedValue({ id: repoId, 
name: repoName } as ProjectSchema) + + const result = await service.upsertProjectGroupRepo(projectSlug, repoName, 'desc') + + expect(result).toEqual({ id: repoId, name: repoName }) + expect(gitlabMock.ProjectCustomAttributes.set).toHaveBeenCalledWith(repoId, MANAGED_BY_CONSOLE_CUSTOM_ATTRIBUTE_KEY, 'true') + }) + + describe('upsertUser', () => { + it('should create user and set custom attribute if not exists', async () => { + const consoleUser = { id: 'u1', email: 'new@example.com', firstName: 'New', lastName: 'User' } + const gitlabUsersAllMock = gitlabMock.Users.all as MockedFunction + gitlabUsersAllMock.mockResolvedValue([]) + gitlabMock.Users.create.mockResolvedValue(makeExpandedUserSchema({ id: 999, email: consoleUser.email })) + + const result = await service.upsertUser(consoleUser) + + expect(result).toEqual(expect.objectContaining({ id: 999, email: consoleUser.email })) + expect(gitlabMock.UserCustomAttributes.set).toHaveBeenCalledWith(999, USER_ID_CUSTOM_ATTRIBUTE_KEY, consoleUser.id) + }) + + it('should set custom attribute if user exists', async () => { + const consoleUser = { id: 'u1', email: 'existing@example.com', firstName: 'Existing', lastName: 'User' } + const gitlabUsersAllMock = gitlabMock.Users.all as MockedFunction + gitlabUsersAllMock.mockResolvedValue([makeExpandedUserSchema({ id: 1000, email: consoleUser.email })]) + + const result = await service.upsertUser(consoleUser) + + expect(result).toEqual(expect.objectContaining({ id: 1000, email: consoleUser.email })) + expect(gitlabMock.UserCustomAttributes.set).toHaveBeenCalledWith(1000, USER_ID_CUSTOM_ATTRIBUTE_KEY, consoleUser.id) + expect(gitlabMock.Users.create).not.toHaveBeenCalled() + }) + }) + + it('should create pipeline trigger token if not exists', async () => { + const projectSlug = 'project-1' + const repoId = 1 + const tokenDescription = 'mirroring-from-external-repo' + + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + 
gitlabProjectsAllMock.mockResolvedValue({ + data: [{ id: repoId, path_with_namespace: 'forge/project-1/mirror' }], + paginationInfo: { next: null }, + }) + gitlabMock.Projects.edit.mockResolvedValue({ id: repoId, name: 'mirror' } as ProjectSchema) + + const gitlabPipelineTriggerTokensAllMock = gitlabMock.PipelineTriggerTokens.all as MockedFunction + gitlabPipelineTriggerTokensAllMock.mockResolvedValue({ + data: [], + paginationInfo: makeOffsetPagination({ next: null }), + }) + + gitlabMock.PipelineTriggerTokens.create.mockResolvedValue(makePipelineTriggerToken({ id: 2, description: tokenDescription })) + + const result = await service.getOrCreateMirrorPipelineTriggerToken(projectSlug) + + expect(result).toEqual(expect.objectContaining({ id: 2, description: tokenDescription })) + expect(gitlabMock.PipelineTriggerTokens.create).toHaveBeenCalledWith(repoId, tokenDescription) + }) + }) + + describe('group Members', () => { + it('should get group members', async () => { + const groupId = 1 + const group = makeGroupSchema({ id: groupId }) + const members = [makeMemberSchema({ id: 1, name: 'user' })] + const gitlabGroupMembersAllMock = gitlabMock.GroupMembers.all as MockedFunction + gitlabGroupMembersAllMock.mockResolvedValue(members) + + const result = await service.getGroupMembers(group) + expect(result).toEqual(members) + expect(gitlabMock.GroupMembers.all).toHaveBeenCalledWith(groupId) + }) + + it('should add group member', async () => { + const groupId = 1 + const group = makeGroupSchema({ id: groupId }) + const userId = 2 + const accessLevel = 30 + gitlabMock.GroupMembers.add.mockResolvedValue(makeMemberSchema({ id: userId })) + + await service.addGroupMember(group, userId, accessLevel) + expect(gitlabMock.GroupMembers.add).toHaveBeenCalledWith(groupId, userId, accessLevel) + }) + + it('should remove group member', async () => { + const groupId = 1 + const group = makeGroupSchema({ id: groupId }) + const userId = 2 + 
gitlabMock.GroupMembers.remove.mockResolvedValue(undefined) + + await service.removeGroupMember(group, userId) + expect(gitlabMock.GroupMembers.remove).toHaveBeenCalledWith(groupId, userId) + }) + }) + + describe('createProjectMirrorAccessToken', () => { + it('should create project access token with correct scopes', async () => { + const projectSlug = 'project-1' + const groupId = 456 + const tokenName = `${projectSlug}-bot` + const token = makeAccessTokenExposedSchema({ id: 1, name: tokenName, token: 'secret-token' }) + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: 123, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: 123, full_path: 'forge' } as ExpandedGroupSchema) + + const gitlabGroupsAllSubgroupsMock = gitlabMock.Groups.allSubgroups as MockedFunction + gitlabGroupsAllSubgroupsMock.mockResolvedValueOnce({ + data: [{ id: groupId, name: projectSlug, parent_id: 123, full_path: 'forge/project-1' }], + paginationInfo: { next: null }, + }) + + gitlabMock.GroupAccessTokens.create.mockResolvedValue(token) + + const result = await service.createMirrorAccessToken(projectSlug) + + expect(result).toEqual(token) + expect(gitlabMock.GroupAccessTokens.create).toHaveBeenCalledWith( + groupId, + tokenName, + ['write_repository', 'read_repository', 'read_api'], + expect.any(String), + ) + }) + }) + + describe('getOrCreateProjectGroupRepo', () => { + it('should return existing repo', async () => { + const subGroupPath = 'project-1' + const repoName = 'repo-1' + const fullPath = `${subGroupPath}/${repoName}` + const projectId = 789 + + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValueOnce({ + data: [{ id: projectId, path_with_namespace: `forge/${fullPath}` }], + paginationInfo: { next: null }, + }) + + const result = await service.getOrCreateProjectGroupRepo(fullPath) 
+ + expect(result).toEqual(expect.objectContaining({ id: projectId })) + }) + + it('should create repo if not exists', async () => { + const subGroupPath = 'project-1' + const repoName = 'repo-1' + const fullPath = `${subGroupPath}/${repoName}` + const projectId = 789 + const groupId = 456 + const rootId = 123 + + const gitlabProjectsAllMock = gitlabMock.Projects.all as MockedFunction + gitlabProjectsAllMock.mockResolvedValueOnce({ + data: [], + paginationInfo: { next: null }, + }) + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: rootId, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: groupId, name: subGroupPath, parent_id: rootId, full_path: `forge/${subGroupPath}` }], + paginationInfo: { next: null }, + }) + + gitlabMock.Projects.create.mockResolvedValue({ id: projectId, name: repoName } as ProjectSchema) + + const result = await service.getOrCreateProjectGroupRepo(fullPath) + + expect(result).toEqual(expect.objectContaining({ id: projectId })) + expect(gitlabMock.Projects.create).toHaveBeenCalledWith(expect.objectContaining({ + name: repoName, + path: repoName, + namespaceId: groupId, + })) + }) + }) + + describe('getFile', () => { + it('should return file content', async () => { + const repoId = 1 + const repo = makeProjectSchema({ id: repoId }) + const filePath = 'file.txt' + const ref = 'main' + const file = makeRepositoryFileExpandedSchema({ content: 'content' }) + + gitlabMock.RepositoryFiles.show.mockResolvedValue(file) + + const result = await service.getFile(repo, filePath, ref) + expect(result).toEqual(file) + }) + + it('should return undefined on 404', async () => { + const repoId = 1 + const repo = makeProjectSchema({ id: repoId }) + const filePath = 'file.txt' + const ref = 'main' + const error = makeGitbeakerRequestError({ description: '404 File Not Found' }) + + 
gitlabMock.RepositoryFiles.show.mockRejectedValue(error) + + const result = await service.getFile(repo, filePath, ref) + expect(result).toBeUndefined() + }) + + it('should throw on other errors', async () => { + const repoId = 1 + const repo = makeProjectSchema({ id: repoId }) + const filePath = 'file.txt' + const ref = 'main' + const error = new Error('Some other error') + + gitlabMock.RepositoryFiles.show.mockRejectedValue(error) + + await expect(service.getFile(repo, filePath, ref)).rejects.toThrow(error) + }) + }) + + describe('listFiles', () => { + it('should return files', async () => { + const repoId = 1 + const repo = makeProjectSchema({ id: repoId }) + const files = [makeRepositoryTreeSchema({ path: 'file.txt' })] + + gitlabMock.Repositories.allRepositoryTrees.mockResolvedValue(files) + + const result = await service.listFiles(repo) + expect(result).toEqual(files) + }) + + it('should return empty array on 404', async () => { + const repoId = 1 + const repo = makeProjectSchema({ id: repoId }) + const error = makeGitbeakerRequestError({ description: '404 Tree Not Found' }) + + gitlabMock.Repositories.allRepositoryTrees.mockRejectedValue(error) + + const result = await service.listFiles(repo) + expect(result).toEqual([]) + }) + }) + + describe('getProjectToken', () => { + it('should return specific token', async () => { + const projectSlug = 'project-1' + const groupId = 456 + const tokenName = `${projectSlug}-bot` + const token = makeAccessTokenSchema({ id: 1, name: tokenName }) + + const gitlabGroupsAllMock = gitlabMock.Groups.all as MockedFunction + gitlabGroupsAllMock.mockResolvedValueOnce({ + data: [{ id: 123, full_path: 'forge' }], + paginationInfo: { next: null }, + }) + gitlabMock.Groups.show.mockResolvedValueOnce({ id: 123, full_path: 'forge' } as ExpandedGroupSchema) + const gitlabGroupsAllSubgroupsMock = gitlabMock.Groups.allSubgroups as MockedFunction + gitlabGroupsAllSubgroupsMock.mockResolvedValueOnce({ + data: [{ id: groupId, name: projectSlug, 
parent_id: 123, full_path: `forge/${projectSlug}` }], + paginationInfo: { next: null }, + }) + + const gitlabGroupAccessTokensAllMock = gitlabMock.GroupAccessTokens.all as MockedFunction + gitlabGroupAccessTokensAllMock.mockResolvedValue({ + data: [token], + paginationInfo: makeOffsetPagination({ next: null }), + }) + + const result = await service.getProjectToken(projectSlug) + expect(result).toEqual(token) + }) + }) + + describe('createUser', () => { + it('should create user', async () => { + const email = 'user@example.com' + const username = 'user' + const name = 'User Name' + const user = makeExpandedUserSchema({ id: 1, username }) + + gitlabMock.Users.create.mockResolvedValue(user) + + const result = await service.createUser(email, username, name) + + expect(result).toEqual(user) + expect(gitlabMock.Users.create).toHaveBeenCalledWith(expect.objectContaining({ + email, + username, + name, + skipConfirmation: true, + })) + }) + }) + + describe('commitMirror', () => { + it('should create mirror commit', async () => { + const repoId = 1 + + await service.commitMirror(repoId) + + expect(gitlabMock.Commits.create).toHaveBeenCalledWith( + repoId, + 'main', + expect.any(String), + expect.arrayContaining([ + expect.objectContaining({ filePath: '.gitlab-ci.yml', action: 'create' }), + expect.objectContaining({ filePath: 'mirror.sh', action: 'create' }), + ]), + ) + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.ts new file mode 100644 index 0000000000..ce5a714037 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-client.service.ts @@ -0,0 +1,512 @@ +import type { + AccessTokenScopes, + BaseRequestOptions, + CommitAction, + CondensedGroupSchema, + CondensedProjectSchema, + Gitlab, + GroupSchema, + OffsetPagination, + PaginationRequestOptions, + PipelineTriggerTokenSchema, +} from '@gitbeaker/core' +import { createHash } from 'node:crypto' +import { 
readFile } from 'node:fs/promises' +import { join } from 'node:path' +import { GitbeakerRequestError } from '@gitbeaker/requester-utils' +import { Inject, Injectable, Logger } from '@nestjs/common' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { find } from '../../utils/iterable' +import { + GROUP_ROOT_CUSTOM_ATTRIBUTE_KEY, + INFRA_GROUP_CUSTOM_ATTRIBUTE_KEY, + INFRA_GROUP_PATH, + MANAGED_BY_CONSOLE_CUSTOM_ATTRIBUTE_KEY, + MIRROR_REPO_NAME, + PROJECT_GROUP_CUSTOM_ATTRIBUTE_KEY, + TOKEN_DESCRIPTION, + TOPIC_PLUGIN_MANAGED, + USER_ID_CUSTOM_ATTRIBUTE_KEY, +} from './gitlab.constants' +import { generateUsername } from './gitlab.utils.js' + +export const GITLAB_REST_CLIENT = Symbol('GITLAB_REST_CLIENT') + +export interface OffsetPaginateOptions { + startPage?: number + perPage?: number + maxPages?: number +} + +@Injectable() +export class GitlabClientService { + private readonly logger = new Logger(GitlabClientService.name) + + constructor( + @Inject(ConfigurationService) readonly config: ConfigurationService, + @Inject(GITLAB_REST_CLIENT) private readonly client: Gitlab, + ) { + } + + async upsertGroupCustomAttribute(groupId: number, key: string, value: string): Promise { + this.logger.verbose(`Upserting a GitLab group custom attribute (groupId=${groupId}, key=${key})`) + try { + await this.client.GroupCustomAttributes.set(groupId, key, value) + } catch (error) { + this.logger.debug(`Failed to upsert a GitLab group custom attribute (groupId=${groupId}, key=${key}): ${error instanceof Error ? 
error.message : String(error)}`) + } + } + + async upsertProjectCustomAttribute(projectId: number, key: string, value: string): Promise { + this.logger.verbose(`Upserting a GitLab project custom attribute (projectId=${projectId}, key=${key})`) + try { + await this.client.ProjectCustomAttributes.set(projectId, key, value) + } catch (error) { + this.logger.debug(`Failed to upsert a GitLab project custom attribute (projectId=${projectId}, key=${key}): ${error instanceof Error ? error.message : String(error)}`) + } + } + + async upsertUserCustomAttribute(userId: number, key: string, value: string): Promise { + this.logger.verbose(`Upserting a GitLab user custom attribute (userId=${userId}, key=${key})`) + try { + await this.client.UserCustomAttributes.set(userId, key, value) + } catch (error) { + this.logger.debug(`Failed to upsert a GitLab user custom attribute (userId=${userId}, key=${key}): ${error instanceof Error ? error.message : String(error)}`) + } + } + + private async setManagedGroupAttributes(group: CondensedGroupSchema, opts: { isRoot?: boolean, isInfra?: boolean, projectSlug?: string } = {}) { + await this.upsertGroupCustomAttribute(group.id, MANAGED_BY_CONSOLE_CUSTOM_ATTRIBUTE_KEY, 'true') + if (opts.isRoot && this.config.projectRootDir) { + await this.upsertGroupCustomAttribute(group.id, GROUP_ROOT_CUSTOM_ATTRIBUTE_KEY, this.config.projectRootDir) + } + if (opts.isInfra) { + await this.upsertGroupCustomAttribute(group.id, INFRA_GROUP_CUSTOM_ATTRIBUTE_KEY, 'true') + } + if (opts.projectSlug) { + await this.upsertGroupCustomAttribute(group.id, PROJECT_GROUP_CUSTOM_ATTRIBUTE_KEY, opts.projectSlug) + } + } + + async getGroupByPath(path: string) { + this.logger.verbose(`Looking up a GitLab group by path ${path}`) + return find( + this.offsetPaginate(opts => this.client.Groups.all({ search: path, orderBy: 'path', ...opts })), + g => g.full_path === path, + ) + } + + async createGroup(path: string) { + this.logger.log(`Creating a GitLab group at path ${path}`) 
+ const created = await this.client.Groups.create(path, path) + if (this.config.projectRootDir && created.full_path === this.config.projectRootDir) { + await this.setManagedGroupAttributes(created, { isRoot: true }) + } + if (this.config.projectRootDir && created.full_path === `${this.config.projectRootDir}/${INFRA_GROUP_PATH}`) { + await this.setManagedGroupAttributes(created, { isInfra: true }) + } + return created + } + + async createSubGroup(parentGroup: CondensedGroupSchema, name: string, fullPath: string) { + this.logger.log(`Creating a GitLab subgroup ${fullPath} (parentId=${parentGroup.id})`) + const created = await this.client.Groups.create(name, name, { parentId: parentGroup.id }) + if (this.config.projectRootDir && fullPath === this.config.projectRootDir) { + await this.setManagedGroupAttributes(created, { isRoot: true }) + } else if (this.config.projectRootDir && fullPath === `${this.config.projectRootDir}/${INFRA_GROUP_PATH}`) { + await this.setManagedGroupAttributes(created, { isInfra: true }) + } else if (this.config.projectRootDir && fullPath.startsWith(`${this.config.projectRootDir}/`) && !fullPath.slice(this.config.projectRootDir.length + 1).includes('/')) { + const projectSlug = fullPath.slice(this.config.projectRootDir.length + 1) + if (projectSlug && projectSlug !== INFRA_GROUP_PATH) { + await this.setManagedGroupAttributes(created, { projectSlug }) + } + } + return created + } + + async getOrCreateGroupByPath(path: string) { + const parts = path.split('/') + const rootGroupPath = parts.shift() + if (!rootGroupPath) throw new Error('Invalid projects root dir') + + this.logger.verbose(`Resolving GitLab group path ${path} (depth=${1 + parts.length})`) + let parentGroup = await this.getGroupByPath(rootGroupPath) ?? await this.createGroup(rootGroupPath) + + let currentFullPath: string + for (const part of parts) { + currentFullPath = `${parentGroup.full_path}/${part}` + parentGroup = await this.getGroupByPath(currentFullPath) ?? 
await this.createSubGroup(parentGroup, part, currentFullPath) + } + + this.logger.verbose(`GitLab group path resolved (path=${path}, groupId=${parentGroup.id})`) + return parentGroup + } + + async getOrCreateProjectGroup() { + if (!this.config.projectRootDir) throw new Error('projectRootDir not configured') + return this.getOrCreateGroupByPath(this.config.projectRootDir) + } + + async getOrCreateProjectSubGroup(subGroupPath: string) { + const fullPath = this.config.projectRootDir + ? `${this.config.projectRootDir}/${subGroupPath}` + : subGroupPath + return this.getOrCreateGroupByPath(fullPath) + } + + async getOrCreateProjectGroupPublicUrl(): Promise { + const projectGroup = await this.getOrCreateProjectGroup() + return `${this.config.gitlabUrl}/${projectGroup.full_path}` + } + + async getOrCreateInfraGroupRepoPublicUrl(repoName: string): Promise { + const projectGroup = await this.getOrCreateProjectGroup() + return `${this.config.gitlabUrl}/${projectGroup.full_path}/${INFRA_GROUP_PATH}/${repoName}.git` + } + + async getOrCreateProjectGroupInternalRepoUrl(subGroupPath: string, repoName: string): Promise { + const projectGroup = await this.getOrCreateProjectSubGroup(subGroupPath) + const urlBase = this.config.getInternalOrPublicGitlabUrl() + if (!urlBase) throw new Error('GITLAB_URL is required') + return `${urlBase}/${projectGroup.full_path}/${repoName}.git` + } + + async getOrCreateProjectGroupRepo(subGroupPath: string) { + const fullPath = this.config.projectRootDir + ? 
`${this.config.projectRootDir}/${subGroupPath}` + : subGroupPath + this.logger.verbose(`Resolving a GitLab project repository by path ${fullPath}`) + try { + const existingRepo = await this.client.Projects.show(fullPath) + if (existingRepo) { + this.logger.verbose(`Found a GitLab project repository (path=${fullPath}, repoId=${existingRepo.id})`) + return existingRepo + } + } catch (error) { + if (!(error instanceof GitbeakerRequestError) || !error.cause?.description?.includes('404')) { + throw error + } + } + const repo = await find( + this.offsetPaginate(opts => this.client.Projects.all({ + search: fullPath, + orderBy: 'path', + ...opts, + })), + p => p.path_with_namespace === fullPath, + ) + if (repo) { + this.logger.verbose(`Found a GitLab project repository via search (path=${fullPath}, repoId=${repo.id})`) + return repo + } + const parts = subGroupPath.split('/') + const repoName = parts.pop() + if (!repoName) throw new Error('Invalid repo path') + const parentGroup = await this.getOrCreateProjectSubGroup(parts.join('/')) + try { + const created = await this.client.Projects.create({ + name: repoName, + path: repoName, + namespaceId: parentGroup.id, + defaultBranch: 'main', + }) + this.logger.log(`Created a GitLab project repository (path=${fullPath}, repoId=${created.id})`) + return created + } catch (error) { + if (error instanceof GitbeakerRequestError && error.cause?.description?.includes('has already been taken')) { + this.logger.warn(`GitLab project repository already exists (race); reloading ${fullPath}`) + return this.client.Projects.show(fullPath) + } + throw error + } + } + + async getOrCreateInfraGroupRepo(path: string) { + return this.getOrCreateProjectGroupRepo(join(INFRA_GROUP_PATH, path)) + } + + async getFile(repo: CondensedProjectSchema, filePath: string, ref: string = 'main') { + try { + return await this.client.RepositoryFiles.show(repo.id, filePath, ref) + } catch (error) { + if (error instanceof GitbeakerRequestError && 
error.cause?.description?.includes('Not Found')) { + this.logger.debug(`GitLab file not found (repoId=${repo.id}, ref=${ref}, filePath=${filePath})`) + return + } + throw error + } + } + + async maybeCreateCommit( + repo: CondensedProjectSchema, + message: string, + actions: CommitAction[], + ref: string = 'main', + ): Promise { + if (actions.length === 0) { + this.logger.debug(`No GitLab commit actions to create (repoId=${repo.id}, ref=${ref})`) + return + } + this.logger.log(`Creating a GitLab commit (repoId=${repo.id}, ref=${ref}, actions=${actions.length})`) + await this.client.Commits.create(repo.id, ref, message, actions) + this.logger.verbose(`GitLab commit created (repoId=${repo.id}, ref=${ref}, actions=${actions.length})`) + } + + async generateCreateOrUpdateAction(repo: CondensedProjectSchema, ref: string, filePath: string, content: string) { + const file = await this.getFile(repo, filePath, ref) + if (file && !hasFileContentChanged(file, content)) { + this.logger.debug(`GitLab file is up to date; skipping commit action (repoId=${repo.id}, ref=${ref}, filePath=${filePath})`) + return null + } + this.logger.verbose(`Prepared a GitLab commit action (repoId=${repo.id}, ref=${ref}, filePath=${filePath}, action=${file ? 'update' : 'create'})`) + return { + action: file ? 'update' : 'create', + filePath, + content, + } satisfies CommitAction + } + + async listFiles(repo: CondensedProjectSchema, options: { path?: string, recursive?: boolean, ref?: string } = {}) { + try { + const path = options.path ?? '/' + const recursive = options.recursive ?? false + const ref = options.ref ?? 'main' + this.logger.verbose(`Listing GitLab repository tree (repoId=${repo.id}, ref=${ref}, path=${path}, recursive=${recursive})`) + const files = await this.client.Repositories.allRepositoryTrees(repo.id, { + path: options.path ?? '/', + recursive: options.recursive ?? false, + ref: options.ref ?? 
'main', + }) + this.logger.verbose(`Listed GitLab repository tree (repoId=${repo.id}, ref=${ref}, path=${path}, count=${files.length})`) + return files + } catch (error) { + if (error instanceof GitbeakerRequestError && error.cause?.description?.includes('Not Found')) { + return [] + } + if (error instanceof GitbeakerRequestError && error.cause?.description?.includes('404 Tree Not Found')) { + return [] + } + throw error + } + } + + async getProjectGroup(projectSlug: string): Promise { + const parentGroup = await this.getOrCreateProjectGroup() + return find( + this.offsetPaginate(opts => this.client.Groups.allSubgroups(parentGroup.id, opts)), + g => g.name === projectSlug, + ) + } + + async deleteGroup(group: CondensedGroupSchema): Promise { + this.logger.verbose(`Deleting GitLab group ${group.full_path} (groupId=${group.id})`) + await this.client.Groups.remove(group.id) + } + + async getGroupMembers(group: CondensedGroupSchema) { + this.logger.verbose(`Loading GitLab group members (groupId=${group.id})`) + return this.client.GroupMembers.all(group.id) + } + + async addGroupMember(group: CondensedGroupSchema, userId: number, accessLevel: number) { + this.logger.verbose(`Adding a GitLab group member (groupId=${group.id}, userId=${userId}, accessLevel=${accessLevel})`) + return this.client.GroupMembers.add(group.id, userId, accessLevel) + } + + async editGroupMember(group: CondensedGroupSchema, userId: number, accessLevel: number) { + this.logger.verbose(`Editing a GitLab group member (groupId=${group.id}, userId=${userId}, accessLevel=${accessLevel})`) + return this.client.GroupMembers.edit(group.id, userId, accessLevel) + } + + async removeGroupMember(group: CondensedGroupSchema, userId: number) { + this.logger.verbose(`Removing a GitLab group member (groupId=${group.id}, userId=${userId})`) + return this.client.GroupMembers.remove(group.id, userId) + } + + async getUserByEmail(email: string) { + const users = await this.client.Users.all({ search: email, orderBy: 
'username' }) + if (users.length === 0) return null + return users[0] + } + + async createUser(email: string, username: string, name: string) { + this.logger.log(`Creating a GitLab user (email=${email}, username=${username})`) + return await this.client.Users.create({ + email, + username, + name, + skipConfirmation: true, + }) + } + + async upsertUser(user: { id: string, email: string, firstName: string, lastName: string }) { + const existing = await this.getUserByEmail(user.email) + const username = generateUsername(user.email) + const name = `${user.firstName} ${user.lastName}`.trim() + const gitlabUser = existing ?? await this.createUser(user.email, username, name) + await this.upsertUserCustomAttribute(gitlabUser.id, USER_ID_CUSTOM_ATTRIBUTE_KEY, user.id) + return gitlabUser + } + + async* getRepos(projectSlug: string) { + const group = await this.getOrCreateProjectSubGroup(projectSlug) + const repos = this.offsetPaginate(opts => this.client.Groups.allProjects(group.id, { simple: false, ...opts })) + for await (const repo of repos) { + yield repo + } + } + + async upsertProjectGroupRepo(projectSlug: string, repoName: string, description?: string) { + const repo = await this.getOrCreateProjectGroupRepo(`${projectSlug}/${repoName}`) + const updated = await this.client.Projects.edit(repo.id, { + name: repoName, + path: repoName, + topics: [TOPIC_PLUGIN_MANAGED], + description, + }) + await this.upsertProjectCustomAttribute(repo.id, MANAGED_BY_CONSOLE_CUSTOM_ATTRIBUTE_KEY, 'true') + return updated + } + + async deleteProjectGroupRepo(projectSlug: string, repoName: string) { + const repo = await this.getOrCreateProjectGroupRepo(`${projectSlug}/${repoName}`) + return this.client.Projects.remove(repo.id) + } + + async commitMirror(repoId: number) { + this.logger.log(`Creating a GitLab mirror bootstrap commit (repoId=${repoId})`) + const actions: CommitAction[] = [ + { + action: 'create', + filePath: '.gitlab-ci.yml', + content: await readGitlabCIConfigContent(), + 
execute_filemode: false, + }, + { + action: 'create', + filePath: 'mirror.sh', + content: await readMirrorScriptContent(), + execute_filemode: true, + }, + ] + + await this.client.Commits.create( + repoId, + 'main', + 'ci: :construction_worker: first mirror', + actions, + ) + this.logger.verbose(`GitLab mirror bootstrap commit created (repoId=${repoId}, actions=${actions.length})`) + } + + async upsertProjectMirrorRepo(projectSlug: string) { + return this.upsertProjectGroupRepo(projectSlug, MIRROR_REPO_NAME) + } + + async getProjectToken(projectSlug: string) { + const group = await this.getProjectGroup(projectSlug) + if (!group) throw new Error('Unable to retrieve gitlab project group') + return find( + this.offsetPaginate<{ name: string }>( + opts => this.client.GroupAccessTokens.all(group.id, opts) as unknown as Promise<{ data: { name: string }[], paginationInfo: OffsetPagination }>, + ), + token => token.name === `${projectSlug}-bot`, + ) + } + + async createProjectToken(projectSlug: string, tokenName: string, scopes: AccessTokenScopes[]) { + const group = await this.getProjectGroup(projectSlug) + if (!group) throw new Error('Unable to retrieve gitlab project group') + const expirationDays = Number(this.config.gitlabMirrorTokenExpirationDays) + const effectiveExpirationDays = Number.isFinite(expirationDays) && expirationDays > 0 ? 
expirationDays : 30 + const expiryDate = new Date(Date.now() + effectiveExpirationDays * 24 * 60 * 60 * 1000) + this.logger.log(`Creating a GitLab group access token (projectSlug=${projectSlug}, tokenName=${tokenName}, expiry=${expiryDate.toISOString().slice(0, 10)})`) + return this.client.GroupAccessTokens.create(group.id, tokenName, scopes, expiryDate.toISOString().slice(0, 10)) + } + + async createMirrorAccessToken(projectSlug: string) { + const tokenName = `${projectSlug}-bot` + return this.createProjectToken(projectSlug, tokenName, ['write_repository', 'read_repository', 'read_api']) + } + + async getOrCreateMirrorPipelineTriggerToken(projectSlug: string): Promise { + const mirrorRepo = await this.upsertProjectMirrorRepo(projectSlug) + this.logger.verbose(`Resolving a GitLab pipeline trigger token (projectSlug=${projectSlug}, repoId=${mirrorRepo.id})`) + const currentTriggerToken = await find( + this.offsetPaginate(opts => this.client.PipelineTriggerTokens.all(mirrorRepo.id, opts)), + token => token.description === TOKEN_DESCRIPTION, + ) + if (currentTriggerToken) { + this.logger.verbose(`GitLab pipeline trigger token found (projectSlug=${projectSlug}, repoId=${mirrorRepo.id})`) + return currentTriggerToken + } + const created = await this.client.PipelineTriggerTokens.create(mirrorRepo.id, TOKEN_DESCRIPTION) + this.logger.log(`GitLab pipeline trigger token created (projectSlug=${projectSlug}, repoId=${mirrorRepo.id})`) + return created + } + + private async* offsetPaginate( + request: (options: PaginationRequestOptions<'offset'> & BaseRequestOptions) => Promise<{ data: T[], paginationInfo: OffsetPagination }>, + options?: OffsetPaginateOptions, + ): AsyncGenerator { + let page: number | null = options?.startPage ?? 
1 + let pagesFetched = 0 + let total = 0 + + this.logger.debug(`Pagination start (page=${page})`) + + while (page !== null) { + if (options?.maxPages && pagesFetched >= options.maxPages) { + page = null + continue + } + + try { + const { data, paginationInfo } = await request({ + page, + perPage: options?.perPage, + maxPages: options?.maxPages, + showExpanded: true, + pagination: 'offset', + }) + + pagesFetched += 1 + total += data.length + + const nextPage = paginationInfo.next ?? null + this.logger.debug(`Pagination page fetched (page=${page}, nextPage=${nextPage ?? 'null'}, items=${data.length}, total=${total})`) + + for (const item of data) { + yield item + } + + page = nextPage + } catch (error) { + if (error instanceof Error) { + this.logger.error(`Pagination request failed (page=${page}): ${error.message}`, error.stack) + } else { + this.logger.error(`Pagination request failed (page=${page}): ${String(error)}`) + } + throw error + } + } + + this.logger.debug(`Pagination done (total=${total})`) + } +} + +function hasFileContentChanged(file: { content_sha256?: string } | null | undefined, content: string) { + return file?.content_sha256 !== digestContent(content) +} + +function digestContent(content: string) { + return createHash('sha256').update(content).digest('hex') +} + +function readGitlabCIConfigContent() { + return readFile(join(__dirname, './files/.gitlab-ci.yml'), 'utf-8') +} + +function readMirrorScriptContent() { + return readFile(join(__dirname, './files/mirror.sh'), 'utf-8') +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts new file mode 100644 index 0000000000..893414df6f --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab-datastore.service.ts @@ -0,0 +1,103 @@ +import type { Prisma } from '@prisma/client' +import { Inject, Injectable } from '@nestjs/common' +import { PrismaService } from 
'../../cpin-module/infrastructure/database/prisma.service'

/**
 * Prisma selection shared by every GitLab-module query: project core fields plus
 * owner, plugin key/values, roles, members (with user details), repositories and
 * clusters (with zone slug).
 */
export const projectSelect = {
  id: true,
  name: true,
  slug: true,
  description: true,
  owner: {
    select: {
      id: true,
      email: true,
      firstName: true,
      lastName: true,
      adminRoleIds: true,
    },
  },
  plugins: {
    select: {
      key: true,
      value: true,
    },
  },
  roles: {
    select: {
      id: true,
      oidcGroup: true,
    },
  },
  members: {
    select: {
      user: {
        select: {
          id: true,
          email: true,
          firstName: true,
          lastName: true,
          adminRoleIds: true,
        },
      },
      roleIds: true,
    },
  },
  repositories: {
    select: {
      id: true,
      internalRepoName: true,
      isInfra: true,
      isPrivate: true,
      externalRepoUrl: true,
      externalUserName: true,
    },
  },
  clusters: {
    select: {
      id: true,
      label: true,
      zone: {
        select: {
          slug: true,
        },
      },
    },
  },
} satisfies Prisma.ProjectSelect

export type ProjectWithDetails = Prisma.ProjectGetPayload<{
  select: typeof projectSelect
}>

/** Read-only database access used by the GitLab plugin module. */
@Injectable()
export class GitlabDatastoreService {
  constructor(@Inject(PrismaService) private readonly prisma: PrismaService) {}

  /** All projects that have at least one 'gitlab' plugin entry. */
  async getAllProjects(): Promise<ProjectWithDetails[]> {
    return this.prisma.project.findMany({
      select: projectSelect,
      where: {
        plugins: {
          some: {
            pluginName: 'gitlab',
          },
        },
      },
    })
  }

  /** A single project by id, or null when it does not exist. */
  async getProject(id: string): Promise<ProjectWithDetails | null> {
    return this.prisma.project.findUnique({
      where: { id },
      select: projectSelect,
    })
  }

  async getUser(id: string) {
    return this.prisma.user.findUnique({
      where: {
        id,
      },
    })
  }
}
diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-health.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-health.service.ts
new file mode 100644
index 0000000000..54ad2a6fcf
--- /dev/null
+++ b/apps/server-nestjs/src/modules/gitlab/gitlab-health.service.ts
@@ -0,0 +1,26 @@
+import { Inject, Injectable } from '@nestjs/common'
+import { HealthIndicatorService } from '@nestjs/terminus'
+import { ConfigurationService } from
'../../cpin-module/infrastructure/configuration/configuration.service'

/** Terminus health indicator probing the GitLab `/-/health` endpoint. */
@Injectable()
export class GitlabHealthService {
  constructor(
    @Inject(ConfigurationService) private readonly config: ConfigurationService,
    @Inject(HealthIndicatorService) private readonly healthIndicator: HealthIndicatorService,
  ) {}

  /**
   * Reports `up` for any non-5xx response, `down` otherwise.
   * A missing GitLab URL configuration is reported as down rather than thrown.
   */
  async check(key: string) {
    const indicator = this.healthIndicator.check(key)
    const urlBase = this.config.getInternalOrPublicGitlabUrl()
    if (!urlBase) return indicator.down('Not configured')

    const url = new URL('/-/health', urlBase).toString()
    try {
      // Bound the probe so a wedged GitLab cannot hang the whole health endpoint;
      // the timeout rejection is reported as `down` by the catch below.
      const response = await fetch(url, { signal: AbortSignal.timeout(5000) })
      if (response.status < 500) return indicator.up({ httpStatus: response.status })
      return indicator.down({ httpStatus: response.status })
    } catch (error) {
      return indicator.down(error instanceof Error ? error.message : String(error))
    }
  }
}
diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab-testing.utils.ts b/apps/server-nestjs/src/modules/gitlab/gitlab-testing.utils.ts
new file mode 100644
index 0000000000..9b1d025011
--- /dev/null
+++ b/apps/server-nestjs/src/modules/gitlab/gitlab-testing.utils.ts
@@ -0,0 +1,357 @@
+import type {
+  AccessTokenExposedSchema,
+  AccessTokenSchema,
+  ExpandedGroupSchema,
+  ExpandedUserSchema,
+  GroupSchema,
+  MemberSchema,
+  OffsetPagination,
+  PipelineTriggerTokenSchema,
+  ProjectSchema,
+  RepositoryFileExpandedSchema,
+  RepositoryTreeSchema,
+} from '@gitbeaker/core'
+import type { ProjectWithDetails } from './gitlab-datastore.service'
+import { faker } from '@faker-js/faker'
+import { AccessLevel } from '@gitbeaker/core'
+import { GitbeakerRequestError } from '@gitbeaker/requester-utils'
+
+export function makeExpandedUserSchema(overrides: Partial<ExpandedUserSchema> = {}): ExpandedUserSchema {
+  const isoDate = faker.date.past().toISOString()
+  return {
+    id: 1,
+    name: 'User',
+    username: 'user',
+    state: 'active',
+    avatar_url: '',
+    web_url: 'https://gitlab.example/users/user',
+    created_at: isoDate,
+    locked: null,
+    bio:
null, + bot: false, + location: null, + public_email: null, + skype: null, + linkedin: null, + twitter: null, + discord: null, + website_url: null, + pronouns: null, + organization: null, + job_title: null, + work_information: null, + followers: null, + following: null, + local_time: null, + is_followed: null, + is_admin: null, + last_sign_in_at: isoDate, + confirmed_at: isoDate, + last_activity_on: isoDate, + email: 'user@example.com', + theme_id: 1, + color_scheme_id: 1, + projects_limit: 0, + current_sign_in_at: null, + note: null, + identities: null, + can_create_group: false, + can_create_project: false, + two_factor_enabled: false, + external: false, + private_profile: null, + namespace_id: null, + created_by: null, + ...overrides, + } satisfies ExpandedUserSchema +} + +export function makeMemberSchema(overrides: Partial = {}) { + return { + id: 1, + username: 'user', + name: 'User', + state: 'active', + avatar_url: '', + web_url: 'https://gitlab.example/users/user', + expires_at: faker.date.future().toISOString(), + access_level: 30, + email: 'user@example.com', + group_saml_identity: { + extern_uid: '', + provider: '', + saml_provider_id: 1, + }, + ...overrides, + } satisfies MemberSchema +} + +export function makeGroupSchema(overrides: Partial = {}) { + return { + id: 123, + web_url: 'https://gitlab.example/groups/forge', + name: 'forge', + avatar_url: '', + full_name: 'forge', + full_path: 'forge', + path: 'forge', + description: '', + visibility: 'private', + share_with_group_lock: false, + require_two_factor_authentication: false, + two_factor_grace_period: 0, + project_creation_level: 'maintainer', + subgroup_creation_level: 'maintainer', + lfs_enabled: true, + default_branch_protection: 0, + request_access_enabled: false, + created_at: faker.date.past().toISOString(), + parent_id: 0, + ...overrides, + } satisfies GroupSchema +} + +export function makeExpandedGroupSchema(overrides: Partial = {}): ExpandedGroupSchema { + return { + id: 123, + web_url: 
'https://gitlab.example/groups/forge', + name: 'forge', + avatar_url: '', + full_name: 'forge', + full_path: 'forge', + path: 'forge', + description: '', + visibility: 'private', + share_with_group_lock: false, + require_two_factor_authentication: false, + two_factor_grace_period: 0, + project_creation_level: 'maintainer', + subgroup_creation_level: 'maintainer', + lfs_enabled: true, + default_branch_protection: 0, + request_access_enabled: false, + created_at: faker.date.past().toISOString(), + parent_id: 0, + runners_token: 'runners-token', + file_template_project_id: 1, + shared_with_groups: null, + ...overrides, + } satisfies ExpandedGroupSchema +} + +export function makeProjectSchema(overrides: Partial = {}) { + return { + id: 1, + web_url: 'https://gitlab.example/projects/1', + name: 'repo', + path: 'repo', + description: '', + name_with_namespace: 'forge / repo', + path_with_namespace: 'forge/repo', + created_at: faker.date.past().toISOString(), + default_branch: 'main', + topics: [], + ssh_url_to_repo: 'ssh://gitlab.example/forge/repo.git', + http_url_to_repo: 'https://gitlab.example/forge/repo.git', + readme_url: '', + forks_count: 0, + avatar_url: null, + star_count: 0, + last_activity_at: faker.date.future().toISOString(), + namespace: { id: 1, name: 'forge', path: 'forge', kind: 'group', full_path: 'forge', avatar_url: '', web_url: 'https://gitlab.example/groups/forge' }, + description_html: '', + visibility: 'private', + empty_repo: false, + owner: { id: 1, name: 'Owner', created_at: faker.date.past().toISOString() }, + issues_enabled: true, + open_issues_count: 0, + merge_requests_enabled: true, + jobs_enabled: true, + wiki_enabled: true, + snippets_enabled: true, + can_create_merge_request_in: true, + resolve_outdated_diff_discussions: false, + container_registry_access_level: 'enabled', + security_and_compliance_access_level: 'enabled', + container_expiration_policy: { + cadence: '1d', + enabled: false, + keep_n: null, + older_than: null, + 
name_regex_delete: null, + name_regex_keep: null, + next_run_at: faker.date.future().toISOString(), + }, + updated_at: faker.date.past().toISOString(), + creator_id: 1, + import_url: null, + import_type: null, + import_status: 'none', + import_error: null, + permissions: { + project_access: { access_level: 0, notification_level: 0 }, + group_access: { access_level: 0, notification_level: 0 }, + }, + archived: false, + license_url: '', + license: { key: 'mit', name: 'MIT', nickname: 'MIT', html_url: '', source_url: '' }, + shared_runners_enabled: true, + group_runners_enabled: true, + runners_token: '', + ci_default_git_depth: 0, + ci_forward_deployment_enabled: false, + ci_forward_deployment_rollback_allowed: false, + ci_allow_fork_pipelines_to_run_in_parent_project: false, + ci_separated_caches: false, + ci_restrict_pipeline_cancellation_role: '', + public_jobs: false, + shared_with_groups: null, + repository_storage: '', + only_allow_merge_if_pipeline_succeeds: false, + allow_merge_on_skipped_pipeline: false, + restrict_user_defined_variables: false, + only_allow_merge_if_all_discussions_are_resolved: false, + remove_source_branch_after_merge: false, + printing_merge_requests_link_enabled: false, + request_access_enabled: false, + merge_method: '', + squash_option: '', + auto_devops_enabled: false, + auto_devops_deploy_strategy: '', + mirror: false, + mirror_user_id: 1, + mirror_trigger_builds: false, + only_mirror_protected_branches: false, + mirror_overwrites_diverged_branches: false, + external_authorization_classification_label: '', + packages_enabled: false, + service_desk_enabled: false, + service_desk_address: 'service-desk@example.com', + service_desk_reply_to: 'service-desk@example.com', + autoclose_referenced_issues: false, + suggestion_commit_message: 'Add suggestion commit message', + enforce_auth_checks_on_uploads: false, + merge_commit_template: 'Add suggestion commit message', + squash_commit_template: 'Add suggestion commit message', + 
issue_branch_template: 'Add suggestion commit message', + marked_for_deletion_on: faker.date.future().toISOString(), + compliance_frameworks: [], + warn_about_potentially_unwanted_characters: false, + container_registry_image_prefix: 'registry.gitlab.example/forge/repo', + _links: { + self: 'https://gitlab.example/projects/1', + issues: 'https://gitlab.example/projects/1/issues', + merge_requests: 'https://gitlab.example/projects/1/merge_requests', + repo_branches: 'https://gitlab.example/projects/1/repository/branches', + labels: 'https://gitlab.example/projects/1/labels', + events: 'https://gitlab.example/projects/1/events', + members: 'https://gitlab.example/projects/1/members', + cluster_agents: 'https://gitlab.example/projects/1/cluster_agents', + }, + ...overrides, + } satisfies ProjectSchema +} + +export function makeProjectWithDetails(overrides: Partial = {}) { + return { + id: 'p1', + slug: 'project-1', + name: 'Project 1', + description: 'Test project', + owner: { id: 'o1', email: 'owner@example.com', firstName: 'Owner', lastName: 'User', adminRoleIds: [] }, + plugins: [], + roles: [], + members: [], + repositories: [], + clusters: [], + ...overrides, + } satisfies ProjectWithDetails +} + +export function makePipelineTriggerToken(overrides: Partial = {}) { + return { + id: 1, + description: 'mirroring-from-external-repo', + created_at: faker.date.past().toISOString(), + last_used: null, + token: 'trigger-token', + updated_at: faker.date.past().toISOString(), + owner: null, + repoId: 1, + ...overrides, + } satisfies PipelineTriggerTokenSchema +} + +export function makeOffsetPagination(overrides: Partial = {}) { + return { + total: 1, + next: null, + current: 1, + previous: null, + perPage: 20, + totalPages: 1, + ...overrides, + } satisfies OffsetPagination +} + +export function makeAccessTokenSchema(overrides: Partial = {}) { + const isoDate = faker.date.past().toISOString() + return { + id: 1, + user_id: 1, + name: 'token', + expires_at: isoDate, + 
active: true, + created_at: isoDate, + revoked: false, + access_level: AccessLevel.DEVELOPER, + ...overrides, + } satisfies AccessTokenSchema +} + +export function makeAccessTokenExposedSchema(overrides: Partial = {}) { + return { + ...makeAccessTokenSchema(), + token: 'secret-token', + ...overrides, + } satisfies AccessTokenExposedSchema +} + +export function makeRepositoryFileExpandedSchema(overrides: Partial = {}) { + return { + file_name: 'file.txt', + file_path: 'file.txt', + size: 7, + encoding: 'base64', + content: 'content', + content_sha256: 'sha256', + ref: 'main', + blob_id: 'blob', + commit_id: 'commit', + last_commit_id: 'last-commit', + ...overrides, + } satisfies RepositoryFileExpandedSchema +} + +export function makeRepositoryTreeSchema(overrides: Partial = {}) { + return { + id: 'id', + name: 'file.txt', + type: 'blob', + path: 'file.txt', + mode: '100644', + ...overrides, + } satisfies RepositoryTreeSchema +} + +export function makeGitbeakerRequestError(params: { message?: string, status?: number, statusText?: string, description: string }) { + const request = new Request('https://gitlab.internal.example/api') + const response = new Response(null, { status: params.status ?? 404, statusText: params.statusText ?? 'Not Found' }) + return new GitbeakerRequestError(params.message ?? params.statusText ?? 
'Error', {
    cause: {
      description: params.description,
      request,
      response,
    },
  })
}
diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.constants.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.constants.ts
new file mode 100644
index 0000000000..b52f8b95a5
--- /dev/null
+++ b/apps/server-nestjs/src/modules/gitlab/gitlab.constants.ts
@@ -0,0 +1,22 @@
// Well-known names of console-managed GitLab objects.
export const INFRA_GROUP_NAME = 'Infra'
export const INFRA_GROUP_PATH = 'infra'
export const INFRA_APPS_REPO_NAME = 'infra-apps'
export const MIRROR_REPO_NAME = 'mirror'
export const TOPIC_PLUGIN_MANAGED = 'plugin-managed'
export const TOKEN_DESCRIPTION = 'mirroring-from-external-repo'

// Default OIDC group paths mapped to GitLab roles.
// NOTE(review): the developer suffix packs two paths into one comma-separated
// string — presumably split by the consumer; confirm against its caller.
export const DEFAULT_ADMIN_GROUP_PATH = '/console/admin'
export const DEFAULT_AUDITOR_GROUP_PATH = '/console/readonly'
export const DEFAULT_PROJECT_MAINTAINER_GROUP_PATH_SUFFIX = '/console/admin'
export const DEFAULT_PROJECT_DEVELOPER_GROUP_PATH_SUFFIX = '/console/developer,/console/devops'
export const DEFAULT_PROJECT_REPORTER_GROUP_PATH_SUFFIX = '/console/readonly'

// Custom-attribute keys used to tag console-managed GitLab groups/projects/users.
export const GROUP_ROOT_CUSTOM_ATTRIBUTE_KEY = 'cpn_projects_root_dir'
export const INFRA_GROUP_CUSTOM_ATTRIBUTE_KEY = 'cpn_infra_group'
export const PROJECT_GROUP_CUSTOM_ATTRIBUTE_KEY = 'cpn_project_slug'
export const USER_ID_CUSTOM_ATTRIBUTE_KEY = 'cpn_user_id'
export const MANAGED_BY_CONSOLE_CUSTOM_ATTRIBUTE_KEY = 'cpn_managed_by_console'

/** Builds the `custom_attributes[key]=value` query filter accepted by GitLab list endpoints. */
export function customAttributesFilter(key: string, value: string): Record<string, string> {
  return { [`custom_attributes[${key}]`]: value }
}
diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts
new file mode 100644
index 0000000000..954b10b84b
--- /dev/null
+++ b/apps/server-nestjs/src/modules/gitlab/gitlab.module.ts
@@ -0,0 +1,32 @@
+import { Gitlab } from '@gitbeaker/rest'
+import { Module } from '@nestjs/common'
+import { HealthIndicatorService } from '@nestjs/terminus'
+import { ConfigurationModule } from
'../../cpin-module/infrastructure/configuration/configuration.module'
import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service'
import { InfrastructureModule } from '../../cpin-module/infrastructure/infrastructure.module'
import { VaultModule } from '../vault/vault.module'
import { GITLAB_REST_CLIENT, GitlabClientService } from './gitlab-client.service'
import { GitlabDatastoreService } from './gitlab-datastore.service'
import { GitlabHealthService } from './gitlab-health.service'
import { GitlabService } from './gitlab.service'

/** Builds the raw Gitbeaker REST client from runtime configuration. */
function createGitlabRestClient(config: ConfigurationService) {
  return new Gitlab({
    token: config.gitlabToken,
    host: config.getInternalOrPublicGitlabUrl(),
  })
}

/**
 * Wires the GitLab plugin: the REST client (behind the GITLAB_REST_CLIENT token),
 * its typed wrapper, the datastore, the health probe and the orchestration service.
 * Only the client wrapper and the health probe are exposed to other modules.
 */
@Module({
  imports: [ConfigurationModule, InfrastructureModule, VaultModule],
  providers: [
    {
      provide: GITLAB_REST_CLIENT,
      inject: [ConfigurationService],
      useFactory: createGitlabRestClient,
    },
    HealthIndicatorService,
    GitlabClientService,
    GitlabDatastoreService,
    GitlabHealthService,
    GitlabService,
  ],
  exports: [GitlabClientService, GitlabHealthService],
})
export class GitlabModule {}
diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts
new file mode 100644
index 0000000000..e948a7071a
--- /dev/null
+++ b/apps/server-nestjs/src/modules/gitlab/gitlab.service.spec.ts
@@ -0,0 +1,336 @@
+import type { AccessTokenExposedSchema } from '@gitbeaker/core'
+import type { Mocked } from 'vitest'
+import { ENABLED } from '@cpn-console/shared'
+import { faker } from '@faker-js/faker'
+import { AccessLevel } from '@gitbeaker/core'
+import { Test } from '@nestjs/testing'
+import { beforeEach, describe, expect, it, vi } from 'vitest'
+import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service'
+import { VaultClientService } from '../vault/vault-client.service'
+import { GitlabClientService } from './gitlab-client.service'
+import {
GitlabDatastoreService } from './gitlab-datastore.service' +import { makeAccessTokenExposedSchema, makeExpandedUserSchema, makeGroupSchema, makeMemberSchema, makePipelineTriggerToken, makeProjectSchema, makeProjectWithDetails } from './gitlab-testing.utils' +import { TOPIC_PLUGIN_MANAGED } from './gitlab.constants' +import { GitlabService } from './gitlab.service' + +function createGitlabControllerServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + GitlabService, + { + provide: GitlabClientService, + useValue: { + getOrCreateProjectSubGroup: vi.fn(), + getGroupMembers: vi.fn(), + addGroupMember: vi.fn(), + editGroupMember: vi.fn(), + removeGroupMember: vi.fn(), + upsertUser: vi.fn(), + getRepos: vi.fn(), + getProjectToken: vi.fn(), + deleteGroup: vi.fn(), + commitMirror: vi.fn(), + getOrCreateMirrorPipelineTriggerToken: vi.fn(), + createProjectToken: vi.fn(), + createMirrorAccessToken: vi.fn(), + upsertProjectGroupRepo: vi.fn(), + upsertProjectMirrorRepo: vi.fn(), + getOrCreateProjectGroupInternalRepoUrl: vi.fn(), + deleteProjectGroupRepo: vi.fn(), + getOrCreateInfraGroupRepoPublicUrl: vi.fn(), + } satisfies Partial<GitlabClientService>, + }, + { + provide: GitlabDatastoreService, + useValue: { + getAllProjects: vi.fn(), + } satisfies Partial<GitlabDatastoreService>, + }, + { + provide: VaultClientService, + useValue: { + read: vi.fn(), + write: vi.fn(), + delete: vi.fn(), + readGitlabMirrorCreds: vi.fn(), + writeGitlabMirrorCreds: vi.fn(), + deleteGitlabMirrorCreds: vi.fn(), + readTechnReadOnlyCreds: vi.fn(), + writeTechReadOnlyCreds: vi.fn(), + writeMirrorTriggerToken: vi.fn(), + } satisfies Partial<VaultClientService>, + }, + { + provide: ConfigurationService, + useValue: { + projectRootDir: 'forge', + }, + }, + ], + }) +} + +describe('gitlabService', () => { + let service: GitlabService + let gitlab: Mocked<GitlabClientService> + let vault: Mocked<VaultClientService> + let gitlabDatastore: Mocked<GitlabDatastoreService> + + beforeEach(async () => { + const moduleRef = await createGitlabControllerServiceTestingModule().compile() + service =
moduleRef.get(GitlabService) + gitlab = moduleRef.get(GitlabClientService) + vault = moduleRef.get(VaultClientService) + gitlabDatastore = moduleRef.get(GitlabDatastoreService) + + vault.writeGitlabMirrorCreds.mockResolvedValue(undefined) + vault.deleteGitlabMirrorCreds.mockResolvedValue(undefined) + vault.writeTechReadOnlyCreds.mockResolvedValue(undefined) + vault.writeMirrorTriggerToken.mockResolvedValue(undefined) + vault.readTechnReadOnlyCreds.mockResolvedValue(null) + vault.readGitlabMirrorCreds.mockResolvedValue(null) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + + describe('handleUpsert', () => { + it('should reconcile project members and repositories', async () => { + const project = makeProjectWithDetails() + const group = makeGroupSchema({ + id: 123, + full_path: 'forge/console/project-1', + full_name: 'forge/console/project-1', + name: 'project-1', + path: 'project-1', + parent_id: 1, + }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectGroupRepo.mockResolvedValue(makeProjectSchema({ id: 1 })) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateInfraGroupRepoPublicUrl.mockResolvedValue('https://gitlab.internal/repo') + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + gitlab.upsertUser.mockResolvedValue(makeExpandedUserSchema({ id: 123, username: 'user' })) + + await service.handleUpsert(project) + + expect(gitlab.getOrCreateProjectSubGroup).toHaveBeenCalledWith(project.slug) + expect(gitlab.getGroupMembers).toHaveBeenCalledWith(group) + expect(gitlab.getRepos).toHaveBeenCalledWith(project.slug) + }) + + it('should remove orphan member if purge enabled', async () => { + const project 
= makeProjectWithDetails({ + plugins: [{ key: 'purge', value: ENABLED }], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([makeMemberSchema({ id: 999, username: 'orphan' })]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.removeGroupMember).toHaveBeenCalledWith(group, 999) + }) + + it('should not remove managed user (bot) even if purge enabled', async () => { + const project = makeProjectWithDetails({ + plugins: [{ key: 'purge', value: ENABLED }], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([makeMemberSchema({ id: 888, username: 'group_123_bot' })]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.removeGroupMember).not.toHaveBeenCalled() + }) + + it('should not remove orphan member if purge disabled', async () => { + const project = makeProjectWithDetails() + const group = makeGroupSchema({ id: 123, name: 
'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([makeMemberSchema({ id: 999, username: 'orphan' })]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.removeGroupMember).not.toHaveBeenCalled() + }) + + it('should delete orphan repositories if purge enabled', async () => { + const project = makeProjectWithDetails({ + plugins: [{ key: 'purge', value: ENABLED }], + repositories: [], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + const orphanRepo = makeProjectSchema({ name: 'orphan-repo', topics: [TOPIC_PLUGIN_MANAGED] }) + const unmanagedRepo = makeProjectSchema({ name: 'unmanaged-repo', topics: [] }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockImplementation(() => (async function* () { + yield orphanRepo + yield unmanagedRepo + })()) + gitlab.deleteProjectGroupRepo.mockResolvedValue(undefined) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.deleteProjectGroupRepo).toHaveBeenCalledWith(project.slug, 'orphan-repo') + 
expect(gitlab.deleteProjectGroupRepo).toHaveBeenCalledTimes(1) + }) + + it('should not delete orphan repositories if purge disabled', async () => { + const project = makeProjectWithDetails({ + repositories: [], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + const orphanRepo = makeProjectSchema({ name: 'orphan-repo', topics: [TOPIC_PLUGIN_MANAGED] }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockImplementation(() => (async function* () { + yield orphanRepo + })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.deleteProjectGroupRepo).not.toHaveBeenCalled() + }) + + it('should not delete orphan repositories without the correct topic even if purge enabled', async () => { + const project = makeProjectWithDetails({ + plugins: [{ key: 'purge', value: ENABLED }], + repositories: [], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + const orphanRepoWithoutTopic = makeProjectSchema({ name: 'orphan-repo', topics: [] }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockImplementation(() => (async function* () { + yield orphanRepoWithoutTopic + })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + 
gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.deleteProjectGroupRepo).not.toHaveBeenCalled() + }) + + it('should create gitlab user if not exists', async () => { + const project = makeProjectWithDetails({ + members: [{ user: { id: 'u1', email: 'new@example.com', firstName: 'New', lastName: 'User', adminRoleIds: [] }, roleIds: [] }], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.upsertUser.mockImplementation(async (user) => { + return makeExpandedUserSchema({ + id: user.email === 'new@example.com' ? 999 : 998, + email: user.email, + username: user.email.split('@')[0] ?? user.email, + name: `${user.firstName} ${user.lastName}`, + }) + }) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.upsertUser).toHaveBeenCalledWith(expect.objectContaining({ email: 'new@example.com' })) + expect(gitlab.upsertUser).toHaveBeenCalledWith(expect.objectContaining({ email: 'owner@example.com' })) + expect(gitlab.addGroupMember).toHaveBeenCalledWith(group, 999, AccessLevel.GUEST) + expect(gitlab.addGroupMember).toHaveBeenCalledWith(group, 998, AccessLevel.OWNER) + }) + + it('should configure repository mirroring if external url is present', async () => { + const project = makeProjectWithDetails({ + repositories: [{ + id: 'r1', + internalRepoName: 'repo-1', + externalRepoUrl: 
'https://github.com/org/repo.git', + isPrivate: true, + externalUserName: 'user', + isInfra: false, + }], + }) + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + const gitlabRepo = makeProjectSchema({ id: 101, name: 'repo-1', path: 'repo-1', path_with_namespace: 'forge/console/project-1/repo-1' }) + const accessToken = makeAccessTokenExposedSchema({ + name: 'bot', + scopes: ['read_api'], + access_level: 40, + }) + + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockReturnValue((async function* () { yield gitlabRepo })()) + gitlab.getOrCreateProjectGroupInternalRepoUrl.mockResolvedValue('https://gitlab.internal/group/repo-1.git') + gitlab.createMirrorAccessToken.mockResolvedValue(accessToken) + vault.readTechnReadOnlyCreds.mockResolvedValue(null) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleUpsert(project) + + expect(gitlab.createMirrorAccessToken).toHaveBeenCalledWith('project-1') + expect(gitlab.upsertProjectMirrorRepo).toHaveBeenCalledWith('project-1') + + expect(vault.writeGitlabMirrorCreds).toHaveBeenCalledWith( + 'project-1', + 'repo-1', + expect.objectContaining({ + GIT_INPUT_URL: 'github.com/org/repo.git', + GIT_OUTPUT_USER: 'bot', + GIT_OUTPUT_PASSWORD: accessToken.token, + }), + ) + expect(vault.writeTechReadOnlyCreds).toHaveBeenCalledWith('project-1', { + MIRROR_USER: 'bot', + MIRROR_TOKEN: accessToken.token, + }) + }) + }) + + describe('handleCron', () => { + it('should reconcile all projects', async () => { + const projects = [makeProjectWithDetails({ id: 'p1', slug: 'project-1' })] + 
gitlabDatastore.getAllProjects.mockResolvedValue(projects) + + const group = makeGroupSchema({ id: 123, name: 'project-1', path: 'project-1', full_path: 'forge/console/project-1', full_name: 'forge/console/project-1', parent_id: 1 }) + gitlab.getOrCreateProjectSubGroup.mockResolvedValue(group) + gitlab.getGroupMembers.mockResolvedValue([]) + gitlab.getRepos.mockReturnValue((async function* () { })()) + gitlab.upsertProjectMirrorRepo.mockResolvedValue(makeProjectSchema({ id: 1, name: 'mirror', path: 'mirror', path_with_namespace: 'forge/console/project-1/mirror', empty_repo: false })) + gitlab.getOrCreateMirrorPipelineTriggerToken.mockResolvedValue(makePipelineTriggerToken()) + + await service.handleCron() + + expect(gitlabDatastore.getAllProjects).toHaveBeenCalled() + expect(gitlab.getOrCreateProjectSubGroup).toHaveBeenCalledWith('project-1') + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts new file mode 100644 index 0000000000..0abc166131 --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.service.ts @@ -0,0 +1,481 @@ +import type { CondensedGroupSchema, MemberSchema, ProjectSchema } from '@gitbeaker/core' +import type { VaultSecret } from '../vault/vault-client.service' +import type { ProjectWithDetails } from './gitlab-datastore.service' +import { specificallyEnabled } from '@cpn-console/hooks' +import { AccessLevel } from '@gitbeaker/core' +import { Inject, Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import { trace } from '@opentelemetry/api' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { StartActiveSpan } from '../../cpin-module/infrastructure/telemetry/telemetry.decorator' +import { getAll } from '../../utils/iterable' +import { VaultClientService } from 
'../vault/vault-client.service' +import { GitlabClientService } from './gitlab-client.service' +import { GitlabDatastoreService } from './gitlab-datastore.service' +import { INFRA_APPS_REPO_NAME, TOPIC_PLUGIN_MANAGED } from './gitlab.constants' +import { DEFAULT_PROJECT_DEVELOPER_GROUP_PATH_SUFFIX, DEFAULT_PROJECT_MAINTAINER_GROUP_PATH_SUFFIX, DEFAULT_PROJECT_REPORTER_GROUP_PATH_SUFFIX } from './gitlab.constants' +import { generateUsernameCandidates } from './gitlab.utils' + +const ownedUserRegex = /group_\d+_bot/u + +@Injectable() +export class GitlabService { + private readonly logger = new Logger(GitlabService.name) + + constructor( + @Inject(GitlabDatastoreService) private readonly gitlabDatastore: GitlabDatastoreService, + @Inject(GitlabClientService) private readonly gitlab: GitlabClientService, + @Inject(VaultClientService) private readonly vault: VaultClientService, + @Inject(ConfigurationService) private readonly config: ConfigurationService, + ) { + this.logger.log('GitLabService initialized') + } + + @OnEvent('project.upsert') + @StartActiveSpan() + async handleUpsert(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.log(`Handling a project upsert event for ${project.slug}`) + await this.ensureProjectGroup(project) + this.logger.log(`GitLab sync completed for project ${project.slug}`) + } + + @OnEvent('project.delete') + @StartActiveSpan() + async handleDelete(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.log(`Handling a project delete event for ${project.slug}`) + await this.ensureProjectGroup(project) + this.logger.log(`GitLab sync completed for project ${project.slug}`) + } + + @Cron(CronExpression.EVERY_HOUR) + @StartActiveSpan() + async handleCron() { + const span = trace.getActiveSpan() + span?.setAttribute('gitlab.projects.count', 0) + this.logger.log('Starting GitLab
reconciliation') + const projects = await this.gitlabDatastore.getAllProjects() + span?.setAttribute('gitlab.projects.count', projects.length) + this.logger.log(`Loaded ${projects.length} projects for GitLab reconciliation`) + await this.ensureProjectGroups(projects) + this.logger.log(`GitLab reconciliation completed (${projects.length})`) + } + + @StartActiveSpan() + private async ensureProjectGroups(projects: ProjectWithDetails[]) { + const span = trace.getActiveSpan() + span?.setAttribute('gitlab.projects.count', projects.length) + this.logger.verbose(`Reconciling GitLab project groups (${projects.length})`) + await Promise.all(projects.map(p => this.ensureProjectGroup(p))) + } + + @StartActiveSpan() + private async ensureProjectGroup(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.verbose(`Reconciling GitLab project group (${project.slug})`) + const group = await this.gitlab.getOrCreateProjectSubGroup(project.slug) + const members = await this.gitlab.getGroupMembers(group) + this.logger.verbose(`Loaded GitLab project group state (${project.slug}): groupId=${group.id} members=${members.length}`) + await this.ensureProjectGroupMembers(project, group, members) + await this.ensureProjectRepos(project) + await this.purgeOrphanRepos(project) + await this.ensureSystemRepos(project) + this.logger.verbose(`GitLab project group reconciled (${project.slug})`) + } + + @StartActiveSpan() + private async ensureProjectGroupMembers( + project: ProjectWithDetails, + group: CondensedGroupSchema, + members: MemberSchema[], + ) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.verbose(`Reconciling GitLab group members for project ${project.slug} (groupId=${group.id}, members=${members.length})`) + await this.addMissingMembers(project, group, members) + await this.addMissingOwnerMember(project, group, members) + await 
this.purgeOrphanMembers(project, group, members) + } + + private async addMissingMembers( + project: ProjectWithDetails, + group: CondensedGroupSchema, + members: MemberSchema[], + ) { + const membersById = new Map(members.map(m => [m.id, m])) + const accessLevelByUserId = generateAccessLevelMapping(project) + + await Promise.all(project.members.map(async ({ user }) => { + const gitlabUser = await this.gitlab.upsertUser(user) + if (!gitlabUser) { + this.logger.warn(`Unable to resolve a GitLab user for a project member (project=${project.slug}, userId=${user.id}, email=${user.email})`) + return + } + const accessLevel = accessLevelByUserId.get(user.id) ?? AccessLevel.NO_ACCESS + await this.ensureGroupMemberAccessLevel(group, gitlabUser.id, accessLevel, membersById) + })) + } + + private async ensureGroupMemberAccessLevel( + group: CondensedGroupSchema, + gitlabUserId: number, + accessLevel: AccessLevel, + membersById: Map<number, MemberSchema>, + ) { + const existingMember = membersById.get(gitlabUserId) + + if (accessLevel === AccessLevel.NO_ACCESS) { + if (existingMember) { + await this.gitlab.removeGroupMember(group, gitlabUserId) + } + return + } + + if (!existingMember) { + await this.gitlab.addGroupMember(group, gitlabUserId, accessLevel) + return + } + + if (existingMember.access_level !== accessLevel) { + await this.gitlab.editGroupMember(group, gitlabUserId, accessLevel) + } + } + + private async addMissingOwnerMember( + project: ProjectWithDetails, + group: CondensedGroupSchema, + members: MemberSchema[], + ) { + const gitlabUser = await this.gitlab.upsertUser(project.owner) + if (!gitlabUser) { + this.logger.warn(`Unable to resolve the GitLab owner account (project=${project.slug}, ownerId=${project.owner.id}, email=${project.owner.email})`) + return + } + const membersById = new Map(members.map(m => [m.id, m])) + await this.ensureGroupMemberAccessLevel(group, gitlabUser.id, AccessLevel.OWNER, membersById) + } + + @StartActiveSpan() + private async purgeOrphanMembers(
project: ProjectWithDetails, + group: CondensedGroupSchema, + members: MemberSchema[], + ) { + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': project.slug, + 'group.id': group.id, + 'members.total': members.length, + }) + const purgeConfig = getPluginConfig(project, 'purge') + const usernames = new Set([ + ...generateUsernameCandidates(project.owner.email), + ...project.members.flatMap(m => generateUsernameCandidates(m.user.email)), + ]) + const emails = new Set([ + project.owner.email.toLowerCase(), + ...project.members.map(m => m.user.email.toLowerCase()), + ]) + + const orphans = members.filter((member) => { + if (isOwnedUser(member)) return false + if (usernames.has(member.username)) return false + if (member.email && emails.has(member.email.toLowerCase())) return false + return true + }) + span?.setAttribute('orphans.count', orphans.length) + + if (specificallyEnabled(purgeConfig)) { + span?.setAttribute('purge.enabled', true) + let removedCount = 0 + await Promise.all(orphans.map(async (orphan) => { + await this.gitlab.removeGroupMember(group, orphan.id) + removedCount++ + this.logger.log(`Removed a user from the GitLab group (groupId=${group.id}, username=${orphan.username})`) + })) + span?.setAttribute('orphans.removed.count', removedCount) + } else { + span?.setAttribute('purge.enabled', false) + let warnedCount = 0 + for (const orphan of orphans) { + warnedCount++ + this.logger.warn(`User is in the GitLab group but not in the project (purge disabled, username=${orphan.username})`) + } + span?.setAttribute('orphans.warned.count', warnedCount) + } + } + + @StartActiveSpan() + private async ensureProjectRepos(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': project.slug, + 'repositories.count': project.repositories.length, + }) + const gitlabRepositories = await getAll(this.gitlab.getRepos(project.slug)) + span?.setAttribute('gitlab.repositories.count', 
gitlabRepositories.length) + let mirroringEnabledCount = 0 + let mirroringDisabledCount = 0 + for (const repo of project.repositories) { + const externalHost = this.getExternalRepoHost(repo.externalRepoUrl) + span?.addEvent('gitlab.repo.reconcile', { + 'repository.name': repo.internalRepoName, + 'repository.isPrivate': repo.isPrivate, + ...(externalHost ? { 'repository.external.host': externalHost } : {}), + 'repository.external': !!repo.externalRepoUrl, + }) + await this.ensureRepository(project, repo, gitlabRepositories) + + if (repo.externalRepoUrl) { + span?.setAttribute('repository.mirroring', true) + mirroringEnabledCount++ + await this.configureRepositoryMirroring(project, repo) + } else { + span?.setAttribute('repository.mirroring', false) + mirroringDisabledCount++ + await this.vault.deleteGitlabMirrorCreds(project.slug, repo.internalRepoName) + } + } + span?.setAttribute('repositories.mirroring.enabled.count', mirroringEnabledCount) + span?.setAttribute('repositories.mirroring.disabled.count', mirroringDisabledCount) + } + + @StartActiveSpan() + private async purgeOrphanRepos(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + const gitlabRepositories = await getAll(this.gitlab.getRepos(project.slug)) + span?.setAttribute('gitlab.repositories.count', gitlabRepositories.length) + + const orphanRepos = gitlabRepositories.filter(r => isOwnedRepo(r) && !isSystemRepo(project, r)) + span?.setAttribute('orphan.repositories.count', orphanRepos.length) + + if (specificallyEnabled(getPluginConfig(project, 'purge'))) { + span?.setAttribute('purge.enabled', true) + let removedCount = 0 + await Promise.all(orphanRepos.map(async (orphan) => { + await this.gitlab.deleteProjectGroupRepo(project.slug, orphan.name) + removedCount++ + this.logger.log(`Removed a repository from the GitLab project (project=${project.slug}, repoName=${orphan.name})`) + })) + + 
span?.setAttribute('orphan.repositories.removed.count', removedCount) + } else { + span?.setAttribute('purge.enabled', false) + let warnedCount = 0 + for (const orphan of orphanRepos) { + warnedCount++ + this.logger.warn(`Repository is in GitLab but not in the project definition (purge disabled, project=${project.slug}, repoName=${orphan.name})`) + } + span?.setAttribute('managed.repositories.warned.count', warnedCount) + } + } + + private async ensureRepository( + project: ProjectWithDetails, + repo: ProjectWithDetails['repositories'][number], + gitlabRepositories: ProjectSchema[], + ) { + return gitlabRepositories.find(r => r.name === repo.internalRepoName) + ?? await this.gitlab.upsertProjectGroupRepo( + project.slug, + repo.internalRepoName, + ) + } + + @StartActiveSpan() + private async configureRepositoryMirroring( + project: ProjectWithDetails, + repo: ProjectWithDetails['repositories'][number], + ) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + span?.setAttribute('repository.internalRepoName', repo.internalRepoName) + span?.setAttribute('repository.isPrivate', repo.isPrivate) + const externalHost = this.getExternalRepoHost(repo.externalRepoUrl) + if (externalHost) span?.setAttribute('repository.external.host', externalHost) + + const currentVaultSecret = await this.vault.readGitlabMirrorCreds(project.slug, repo.internalRepoName) + span?.setAttribute('vault.secret.exists', !!currentVaultSecret) + if (!currentVaultSecret) { + this.logger.warn(`No existing mirror credentials found in Vault; rotating new credentials (project=${project.slug}, repoName=${repo.internalRepoName})`) + } + + const internalRepoUrl = await this.gitlab.getOrCreateProjectGroupInternalRepoUrl(project.slug, repo.internalRepoName) + const externalRepoUrn = repo.externalRepoUrl.split('://')[1] + const internalRepoUrn = internalRepoUrl.split('://')[1] + span?.setAttribute('repository.externalRepoUrn', externalRepoUrn) + 
span?.setAttribute('repository.internalRepoUrn', internalRepoUrn) + + const projectMirrorCreds = await this.getOrRotateMirrorCreds(project.slug) + + const mirrorSecretData = { + GIT_INPUT_URL: externalRepoUrn, + GIT_INPUT_USER: repo.isPrivate ? repo.externalUserName : undefined, + GIT_INPUT_PASSWORD: currentVaultSecret?.data?.GIT_INPUT_PASSWORD, // Preserve existing password as it's not in DB + GIT_OUTPUT_URL: internalRepoUrn, + GIT_OUTPUT_USER: projectMirrorCreds.MIRROR_USER, + GIT_OUTPUT_PASSWORD: projectMirrorCreds.MIRROR_TOKEN, + } + + // Write to vault if changed + // Using simplified check + await this.vault.writeGitlabMirrorCreds(project.slug, repo.internalRepoName, mirrorSecretData) + span?.setAttribute('vault.secret.written', true) + } + + @StartActiveSpan() + private async ensureSystemRepos(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + await Promise.all([ + this.ensureInfraAppsRepo(project.slug), + this.ensureMirrorRepo(project.slug), + ]) + } + + private async ensureInfraAppsRepo(projectSlug: string) { + await this.gitlab.upsertProjectGroupRepo(projectSlug, INFRA_APPS_REPO_NAME) + } + + private async ensureMirrorRepo(projectSlug: string) { + const mirrorRepo = await this.gitlab.upsertProjectMirrorRepo(projectSlug) + if (mirrorRepo.empty_repo) { + await this.gitlab.commitMirror(mirrorRepo.id) + } + await this.ensureMirrorRepoTriggerToken(projectSlug) + } + + @StartActiveSpan() + private async ensureMirrorRepoTriggerToken(projectSlug: string) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', projectSlug) + const triggerToken = await this.gitlab.getOrCreateMirrorPipelineTriggerToken(projectSlug) + const gitlabSecret = { + PROJECT_SLUG: projectSlug, + GIT_MIRROR_PROJECT_ID: triggerToken.repoId, + GIT_MIRROR_TOKEN: triggerToken.token, + } + await this.vault.writeMirrorTriggerToken(gitlabSecret) + span?.setAttribute('vault.secret.written', true) + } + + 
@StartActiveSpan() + private async getOrRotateMirrorCreds(projectSlug: string) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', projectSlug) + const vaultSecret = await this.vault.readTechnReadOnlyCreds(projectSlug) + if (!vaultSecret) return this.createMirrorAccessToken(projectSlug) + + const isExpiring = this.isMirrorCredsExpiring(vaultSecret) + span?.setAttribute('mirror.creds.expiring', isExpiring) + if (!isExpiring) { + span?.setAttribute('mirror.creds.rotated', false) + return vaultSecret.data as { MIRROR_USER: string, MIRROR_TOKEN: string } + } + return this.createMirrorAccessToken(projectSlug) + } + + @StartActiveSpan() + private async createMirrorAccessToken(projectSlug: string) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', projectSlug) + span?.setAttribute('mirror.creds.rotated', true) + const token = await this.gitlab.createMirrorAccessToken(projectSlug) + const creds = { + MIRROR_USER: token.name, + MIRROR_TOKEN: token.token, + } + await this.vault.writeTechReadOnlyCreds(projectSlug, creds) + span?.setAttribute('vault.secret.written', true) + return creds + } + + private isMirrorCredsExpiring(vaultSecret: VaultSecret): boolean { + if (!vaultSecret?.metadata?.created_time) return false + const createdTime = new Date(vaultSecret.metadata.created_time) + return daysAgoFromNow(createdTime) > this.config.gitlabMirrorTokenRotationThresholdDays + } + + private getExternalRepoHost(externalRepoUrl: string | null | undefined): string | undefined { + if (!externalRepoUrl) return undefined + try { + return new URL(externalRepoUrl).host + } catch { + return undefined + } + } +} + +function isOwnedUser(member: MemberSchema) { + return ownedUserRegex.test(member.username) +} + +function isOwnedRepo(repo: ProjectSchema) { + return repo.topics?.includes(TOPIC_PLUGIN_MANAGED) ?? 
false +} + +function isSystemRepo(project: ProjectWithDetails, repo: ProjectSchema) { + return project.repositories.some(r => r.internalRepoName === repo.name) +} + +function getPluginConfig(project: ProjectWithDetails, key: string) { + return project.plugins?.find(p => p.key === key)?.value +} + +function getGroupPathSuffixes(project: ProjectWithDetails, key: string) { + const value = getPluginConfig(project, key) + if (!value) return null + return value.split(',').map(path => `/${project.slug}${path}`) +} + +function generateAccessLevelMapping(project: ProjectWithDetails) { + const projectReporterGroupPathSuffixes = getProjectReporterGroupPaths(project) + const projectDeveloperGroupPathSuffixes = getProjectDeveloperGroupPaths(project) + const projectMaintainerGroupPathSuffixes = getProjectMaintainerGroupPaths(project) + + const getAccessLevelFromOidcGroup = (oidcGroup: string | null) => { + if (!oidcGroup) return null + if (projectReporterGroupPathSuffixes.includes(oidcGroup)) return AccessLevel.REPORTER + if (projectDeveloperGroupPathSuffixes.includes(oidcGroup)) return AccessLevel.DEVELOPER + if (projectMaintainerGroupPathSuffixes.includes(oidcGroup)) return AccessLevel.MAINTAINER + return null + } + + const roleAccessLevelById = new Map( + project.roles.map(role => [role.id, getAccessLevelFromOidcGroup(role.oidcGroup)]), + ) + + return new Map(project.members.map((membership) => { + let highest = AccessLevel.GUEST + for (const roleId of membership.roleIds) { + const level = roleAccessLevelById.get(roleId) + if (level !== null && level !== undefined && level > highest) highest = level + } + return [membership.user.id, highest] as const + })) +} + +function getProjectMaintainerGroupPaths(project: ProjectWithDetails) { + return getGroupPathSuffixes(project, 'projectMaintainerGroupPathSuffix') + ?? 
DEFAULT_PROJECT_MAINTAINER_GROUP_PATH_SUFFIX.split(',').map(path => `/${project.slug}${path}`) +} + +function getProjectDeveloperGroupPaths(project: ProjectWithDetails) { + return getGroupPathSuffixes(project, 'projectDeveloperGroupPathSuffix') + ?? DEFAULT_PROJECT_DEVELOPER_GROUP_PATH_SUFFIX.split(',').map(path => `/${project.slug}${path}`) +} + +function getProjectReporterGroupPaths(project: ProjectWithDetails) { + return getGroupPathSuffixes(project, 'projectReporterGroupPathSuffix') + ?? DEFAULT_PROJECT_REPORTER_GROUP_PATH_SUFFIX.split(',').map(path => `/${project.slug}${path}`) +} + +function daysAgoFromNow(date: Date) { + return Math.floor((Date.now() - date.getTime()) / (1000 * 60 * 60 * 24)) +} diff --git a/apps/server-nestjs/src/modules/gitlab/gitlab.utils.ts b/apps/server-nestjs/src/modules/gitlab/gitlab.utils.ts new file mode 100644 index 0000000000..3a5eb62b7d --- /dev/null +++ b/apps/server-nestjs/src/modules/gitlab/gitlab.utils.ts @@ -0,0 +1,14 @@ +export function generateUsername(email: string) { + return email.split('@')[0] ?? 
email +} + +export function generateFullyQualifiedUsername(email: string) { + return email.replace('@', '.') +} + +export function generateUsernameCandidates(email: string) { + return [ + generateUsername(email), + generateFullyQualifiedUsername(email), + ] +} diff --git a/apps/server-nestjs/src/modules/healthz/healthz.controller.ts b/apps/server-nestjs/src/modules/healthz/healthz.controller.ts index 68724cd78a..ab01a10357 100644 --- a/apps/server-nestjs/src/modules/healthz/healthz.controller.ts +++ b/apps/server-nestjs/src/modules/healthz/healthz.controller.ts @@ -1,7 +1,10 @@ import { Controller, Get, Inject } from '@nestjs/common' import { HealthCheck, HealthCheckService } from '@nestjs/terminus' import { DatabaseHealthService } from '../../cpin-module/infrastructure/database/database-health.service' +import { ArgoCDHealthService } from '../argocd/argocd-health.service' +import { GitlabHealthService } from '../gitlab/gitlab-health.service' import { KeycloakHealthService } from '../keycloak/keycloak-health.service' +import { VaultHealthService } from '../vault/vault-health.service' @Controller('api/v1/healthz') export class HealthzController { @@ -9,6 +12,9 @@ export class HealthzController { @Inject(HealthCheckService) private readonly health: HealthCheckService, @Inject(DatabaseHealthService) private readonly database: DatabaseHealthService, @Inject(KeycloakHealthService) private readonly keycloak: KeycloakHealthService, + @Inject(GitlabHealthService) private readonly gitlab: GitlabHealthService, + @Inject(VaultHealthService) private readonly vault: VaultHealthService, + @Inject(ArgoCDHealthService) private readonly argocd: ArgoCDHealthService, ) {} @Get() @@ -17,6 +23,9 @@ export class HealthzController { return this.health.check([ () => this.database.check('database'), () => this.keycloak.check('keycloak'), + () => this.gitlab.check('gitlab'), + () => this.vault.check('vault'), + () => this.argocd.check('argocd'), ]) } } diff --git 
a/apps/server-nestjs/src/modules/healthz/healthz.module.ts b/apps/server-nestjs/src/modules/healthz/healthz.module.ts index 5dbd41a00d..995ae24a5f 100644 --- a/apps/server-nestjs/src/modules/healthz/healthz.module.ts +++ b/apps/server-nestjs/src/modules/healthz/healthz.module.ts @@ -1,7 +1,10 @@ import { Module } from '@nestjs/common' import { TerminusModule } from '@nestjs/terminus' import { DatabaseModule } from '../../cpin-module/infrastructure/database/database.module' +import { ArgoCDModule } from '../argocd/argocd.module' +import { GitlabModule } from '../gitlab/gitlab.module' import { KeycloakModule } from '../keycloak/keycloak.module' +import { VaultModule } from '../vault/vault.module' import { HealthzController } from './healthz.controller' @Module({ @@ -9,6 +12,9 @@ import { HealthzController } from './healthz.controller' TerminusModule, DatabaseModule, KeycloakModule, + GitlabModule, + VaultModule, + ArgoCDModule, ], controllers: [HealthzController], }) diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak-client.service.ts b/apps/server-nestjs/src/modules/keycloak/keycloak-client.service.ts index 1d20d60bd7..7465ba190b 100644 --- a/apps/server-nestjs/src/modules/keycloak/keycloak-client.service.ts +++ b/apps/server-nestjs/src/modules/keycloak/keycloak-client.service.ts @@ -27,6 +27,7 @@ export class KeycloakClientService implements OnModuleInit { let first = 0 while (true) { const fetched = await this.client.groups.find({ first, max: SUBGROUPS_PAGINATE_QUERY_MAX, briefRepresentation: false }) + this.logger.verbose(`Loaded a Keycloak groups page (first=${first}, count=${fetched.length})`) if (fetched.length === 0) break for (const group of fetched) { yield group @@ -46,15 +47,21 @@ export class KeycloakClientService implements OnModuleInit { async getGroupByPath(path: string): Promise { const parts = path.split('/').filter(Boolean) + this.logger.verbose(`Resolving Keycloak group path ${path} (depth=${parts.length})`) let current: 
GroupRepresentationWith<'id'> | undefined + if (parts.length === 0) return undefined for (const name of parts) { current = current ? await this.getSubGroupByName(current.id, name) : await this.getRootGroupByName(name) - if (!current) return undefined + if (!current) { + this.logger.verbose(`Keycloak group path segment was not found (path=${path}, missing=${name})`) + return undefined + } } + this.logger.verbose(`Keycloak group path resolved (path=${path}, groupId=${current?.id})`) return current } @@ -78,6 +85,7 @@ export class KeycloakClientService implements OnModuleInit { async deleteGroup(id: string): Promise { const span = trace.getActiveSpan() span?.setAttribute('keycloak.group.id', id) + this.logger.log(`Deleting Keycloak group (groupId=${id})`) await this.client.groups.del({ id }) } @@ -85,6 +93,7 @@ export class KeycloakClientService implements OnModuleInit { const span = trace.getActiveSpan() span?.setAttribute('keycloak.group.id', groupId) const members = await this.client.groups.listMembers({ id: groupId }) + this.logger.verbose(`Loaded Keycloak group members (groupId=${groupId}, count=${members?.length ?? 
0})`) return members || [] } @@ -92,7 +101,7 @@ export class KeycloakClientService implements OnModuleInit { async createGroup(name: string) { const span = trace.getActiveSpan() span?.setAttribute('group.name', name) - this.logger.debug(`Creating Keycloak group: ${name}`) + this.logger.debug(`Creating Keycloak group ${name}`) const result = await this.client.groups.create({ name }) return { ...result, name } as GroupRepresentation } @@ -100,12 +109,14 @@ export class KeycloakClientService implements OnModuleInit { async addUserToGroup(userId: string, groupId: string) { const span = trace.getActiveSpan() span?.setAttribute('keycloak.group.id', groupId) + this.logger.verbose(`Adding user to Keycloak group (userId=${userId}, groupId=${groupId})`) return this.client.users.addToGroup({ id: userId, groupId }) } async removeUserFromGroup(userId: string, groupId: string) { const span = trace.getActiveSpan() span?.setAttribute('keycloak.group.id', groupId) + this.logger.verbose(`Removing user from Keycloak group (userId=${userId}, groupId=${groupId})`) return this.client.users.delFromGroup({ id: userId, groupId }) } @@ -118,6 +129,7 @@ export class KeycloakClientService implements OnModuleInit { max: SUBGROUPS_PAGINATE_QUERY_MAX, first, }) + this.logger.verbose(`Loaded a Keycloak subgroups page (parentId=${parentId}, first=${first}, count=${page.length})`) if (page.length === 0) break for (const subgroup of page) { yield subgroup @@ -130,8 +142,12 @@ export class KeycloakClientService implements OnModuleInit { async getOrCreateGroupByPath(path: string) { const span = trace.getActiveSpan() span?.setAttribute('group.path.depth', path.split('/').filter(Boolean).length) + this.logger.verbose(`Ensuring Keycloak group path exists: ${path}`) const existingGroup = await this.getGroupByPath(path) - if (existingGroup) return existingGroup + if (existingGroup) { + this.logger.verbose(`Keycloak group already exists at path ${path}`) + return existingGroup + } const parts = 
path.split('/').filter(Boolean) let parentId: string | undefined @@ -149,6 +165,9 @@ export class KeycloakClientService implements OnModuleInit { parentId = current?.id } + if (current) { + this.logger.log(`Created Keycloak group path ${path} (groupId=${current.id})`) + } return { ...current, path } as GroupRepresentation } @@ -162,7 +181,7 @@ export class KeycloakClientService implements OnModuleInit { return subgroup } } - this.logger.debug(`Creating SubGroup ${name} under parent ${parentId}`) + this.logger.debug(`Creating Keycloak subgroup ${name} under parentId=${parentId}`) const createdGroup = await this.client.groups.createChildGroup({ id: parentId }, { name }) return { id: createdGroup.id, name } satisfies GroupRepresentation } @@ -170,6 +189,7 @@ export class KeycloakClientService implements OnModuleInit { async getOrCreateConsoleGroup(projectGroup: GroupRepresentationWith<'id'>) { const span = trace.getActiveSpan() span?.setAttribute('keycloak.group.id', projectGroup.id) + this.logger.verbose(`Ensuring Keycloak console group exists (projectGroupId=${projectGroup.id})`) return this.getOrCreateSubGroupByName(projectGroup.id, CONSOLE_GROUP_NAME) } @@ -218,19 +238,21 @@ export class KeycloakClientService implements OnModuleInit { this.getOrCreateSubGroupByName(envGroup.id, 'RO'), this.getOrCreateSubGroupByName(envGroup.id, 'RW'), ]) + this.logger.verbose(`Resolved Keycloak environment groups (consoleGroupId=${consoleGroup.id}, env=${environment.name}, envGroupId=${envGroup.id})`) return { roGroup, rwGroup } } async onModuleInit() { if (!this.config.keycloakRealm) { - this.logger.fatal('Keycloak realm not configured') + this.logger.fatal('Keycloak realm is not configured') return } if (!this.config.keycloakAdmin || !this.config.keycloakAdminPassword) { - this.logger.fatal('Keycloak admin or admin password not configured') + this.logger.fatal('Keycloak admin username or password is not configured') return } try { + this.logger.log(`Authenticating Keycloak admin 
client (realm=${this.config.keycloakRealm})`) await this.client.auth({ clientId: 'admin-cli', grantType: 'password', @@ -238,10 +260,14 @@ export class KeycloakClientService implements OnModuleInit { password: this.config.keycloakAdminPassword, }) } catch (err) { - this.logger.error({ err }, 'Keycloak Admin Client authentication failed') + if (err instanceof Error) { + this.logger.error(`Keycloak Admin Client authentication failed: ${err.message}`, err.stack) + } else { + this.logger.error(`Keycloak Admin Client authentication failed: ${String(err)}`) + } throw err } this.client.setConfig({ realmName: this.config.keycloakRealm }) - this.logger.log('Keycloak Admin Client authenticated') + this.logger.log(`Keycloak Admin Client authenticated (realm=${this.config.keycloakRealm})`) } } diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak-health.service.ts b/apps/server-nestjs/src/modules/keycloak/keycloak-health.service.ts index ff84eeb076..126219e8ca 100644 --- a/apps/server-nestjs/src/modules/keycloak/keycloak-health.service.ts +++ b/apps/server-nestjs/src/modules/keycloak/keycloak-health.service.ts @@ -19,7 +19,7 @@ export class KeycloakHealthService { const baseUrl = `${protocol}://${domain}` const url = new URL(`/realms/${encodeURIComponent(realm)}/.well-known/openid-configuration`, baseUrl).toString() try { - const response = await fetch(url, { method: 'GET' }) + const response = await fetch(url) if (response.status < 500) return indicator.up({ httpStatus: response.status }) return indicator.down({ httpStatus: response.status }) } catch (error) { diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak.module.ts b/apps/server-nestjs/src/modules/keycloak/keycloak.module.ts index d9d8058099..b0889146c9 100644 --- a/apps/server-nestjs/src/modules/keycloak/keycloak.module.ts +++ b/apps/server-nestjs/src/modules/keycloak/keycloak.module.ts @@ -1,8 +1,8 @@ import KcAdminClient from '@keycloak/keycloak-admin-client' import { Module } from '@nestjs/common' 
import { HealthIndicatorService } from '@nestjs/terminus' -import { ConfigurationService } from 'src/cpin-module/infrastructure/configuration/configuration.service' import { ConfigurationModule } from '../../cpin-module/infrastructure/configuration/configuration.module' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' import { InfrastructureModule } from '../../cpin-module/infrastructure/infrastructure.module' import { KEYCLOAK_ADMIN_CLIENT, KeycloakClientService } from './keycloak-client.service' import { KeycloakDatastoreService } from './keycloak-datastore.service' diff --git a/apps/server-nestjs/src/modules/keycloak/keycloak.service.ts b/apps/server-nestjs/src/modules/keycloak/keycloak.service.ts index 7973a42e68..ddea98cb45 100644 --- a/apps/server-nestjs/src/modules/keycloak/keycloak.service.ts +++ b/apps/server-nestjs/src/modules/keycloak/keycloak.service.ts @@ -28,8 +28,9 @@ export class KeycloakService { async handleUpsert(project: ProjectWithDetails) { const span = trace.getActiveSpan() span?.setAttribute('project.slug', project.slug) - this.logger.log(`Handling project upsert for ${project.slug}`) + this.logger.log(`Handling a project upsert event for ${project.slug}`) await this.ensureProjectGroups([project]) + this.logger.log(`Keycloak sync completed for project ${project.slug}`) } @OnEvent('project.delete') @@ -37,8 +38,9 @@ export class KeycloakService { async handleDelete(project: ProjectWithDetails) { const span = trace.getActiveSpan() span?.setAttribute('project.slug', project.slug) - this.logger.log(`Handling project delete for ${project.slug}`) + this.logger.log(`Handling a project delete event for ${project.slug}`) await this.purgeOrphanGroups([project]) + this.logger.log(`Keycloak cleanup completed for project ${project.slug}`) } @Cron(CronExpression.EVERY_HOUR) @@ -48,15 +50,17 @@ export class KeycloakService { this.logger.log('Starting periodic Keycloak reconciliation') const 
projects = await this.keycloakDatastore.getAllProjects() span?.setAttribute('keycloak.projects.count', projects.length) - this.logger.debug(`Reconciling ${projects.length} projects`) + this.logger.debug(`Reconciling Keycloak projects (count=${projects.length})`) await this.ensureProjectGroups(projects) await this.purgeOrphanGroups(projects) + this.logger.log(`Keycloak reconciliation completed (${projects.length})`) } @StartActiveSpan() private async ensureProjectGroups(projects: ProjectWithDetails[]) { const span = trace.getActiveSpan() span?.setAttribute('keycloak.projects.count', projects.length) + this.logger.verbose(`Reconciling Keycloak project groups (${projects.length})`) await Promise.all(projects.map(project => this.ensureProjectGroup(project))) } @@ -69,6 +73,7 @@ export class KeycloakService { 'project.roles.count': project.roles.length, 'project.environments.count': project.environments.length, }) + this.logger.verbose(`Reconciling Keycloak project group (${project.slug}): members=${project.members.length} roles=${project.roles.length}`) const projectGroup = z.object({ id: z.string(), @@ -81,6 +86,7 @@ export class KeycloakService { this.ensureProjectGroupMembers(project, projectGroup), this.ensureConsoleGroup(project, projectGroup), ]) + this.logger.verbose(`Keycloak project group reconciled (${project.slug}): groupId=${projectGroup.id}`) } @StartActiveSpan() @@ -91,6 +97,7 @@ export class KeycloakService { id: z.string(), name: z.string(), }).parse(await this.keycloak.getOrCreateConsoleGroup(group)) + this.logger.verbose(`Reconciling Keycloak console group (${project.slug}): projectGroupId=${group.id} consoleGroupId=${consoleGroup.id}`) await Promise.all([ this.ensureRoleGroups(project, consoleGroup), this.ensureEnvironmentGroups(project, consoleGroup), @@ -109,13 +116,14 @@ export class KeycloakService { }).parse(group) }) const projectSlugs = new Set(projects.map(p => p.slug)) + this.logger.verbose(`Scanning Keycloak groups for orphan cleanup 
(projects=${projects.length})`) const promises: Promise[] = [] let purgedCount = 0 for await (const group of groups) { if (!projectSlugs.has(group.name)) { if (this.isOwnedProjectGroup(group)) { - this.logger.log(`Deleting orphan Keycloak group: ${group.name}`) + this.logger.log(`Deleting an orphan Keycloak group (groupId=${group.id}, groupName=${group.name})`) purgedCount++ promises.push(this.keycloak.deleteGroup(group.id)) } @@ -123,6 +131,7 @@ export class KeycloakService { } span?.setAttribute('purged.count', purgedCount) await Promise.all(promises) + this.logger.log(`Orphan Keycloak group cleanup completed (purged=${purgedCount})`) } private isOwnedProjectGroup(group: GroupRepresentationWith<'subGroups'>) { @@ -135,12 +144,12 @@ export class KeycloakService { private async maybeAddUserToGroup(userId: string, groupId: string, groupName: string) { try { await this.keycloak.addUserToGroup(userId, groupId) - this.logger.log(`Added ${userId} to keycloak group ${groupName}`) + this.logger.log(`Added user to Keycloak group: userId=${userId} groupId=${groupId} groupName=${groupName}`) } catch (e) { if (e.response?.status === 404) { - this.logger.warn(`User ${userId} not found in Keycloak, skipping addition to group ${groupName}`) + this.logger.warn(`User not found in Keycloak, skipping addition: userId=${userId} groupId=${groupId} groupName=${groupName}`) } else if (e.response?.status === 409) { - this.logger.debug(`User ${userId} is already a member of keycloak group ${groupName}`) + this.logger.verbose(`User already a member of Keycloak group: userId=${userId} groupId=${groupId} groupName=${groupName}`) } else { throw e } @@ -150,10 +159,10 @@ export class KeycloakService { private async maybeRemoveUserFromGroup(userId: string, groupId: string, groupName: string) { try { await this.keycloak.removeUserFromGroup(userId, groupId) - this.logger.log(`Removed ${userId} from keycloak group ${groupName}`) + this.logger.log(`Removed user from Keycloak group: userId=${userId} 
groupId=${groupId} groupName=${groupName}`) } catch (e) { if (e.response?.status === 404) { - this.logger.warn(`User ${userId} not found in Keycloak, skipping removal from group ${groupName}`) + this.logger.warn(`User not found in Keycloak, skipping removal: userId=${userId} groupId=${groupId} groupName=${groupName}`) } else { throw e } @@ -398,6 +407,7 @@ export class KeycloakService { span?.setAttribute('keycloak.env_group.rw.id', rwGroup.id) span?.setAttribute('keycloak.env_group.ro.members.current', roMembers.length) span?.setAttribute('keycloak.env_group.rw.members.current', rwMembers.length) + this.logger.verbose(`Reconciling Keycloak environment group members: project=${project.slug} env=${environment.name} roMembers=${roMembers.length} rwMembers=${rwMembers.length}`) const projectUserIds = new Set([project.ownerId, ...project.members.map(m => m.user.id)]) span?.setAttribute('project.users.count', projectUserIds.size) @@ -433,6 +443,7 @@ export class KeycloakService { span?.setAttribute('keycloak.env_group.ro.members.removed', roRemoved) span?.setAttribute('keycloak.env_group.rw.members.added', rwAdded) span?.setAttribute('keycloak.env_group.rw.members.removed', rwRemoved) + this.logger.verbose(`Keycloak environment group members reconciled: project=${project.slug} env=${environment.name} roAdded=${roAdded} roRemoved=${roRemoved} rwAdded=${rwAdded} rwRemoved=${rwRemoved}`) } @StartActiveSpan() @@ -452,6 +463,7 @@ export class KeycloakService { span?.setAttribute('keycloak.env_group.rw.id', rwGroup.id) span?.setAttribute('keycloak.env_group.ro.members.current', roMembers.length) span?.setAttribute('keycloak.env_group.rw.members.current', rwMembers.length) + this.logger.verbose(`Purging orphan Keycloak environment group members: project=${project.slug} env=${environment.name} roMembers=${roMembers.length} rwMembers=${rwMembers.length}`) const projectUserIds = new Set([project.ownerId, ...project.members.map(m => m.user.id)]) 
span?.setAttribute('project.users.count', projectUserIds.size) @@ -482,6 +494,7 @@ export class KeycloakService { span?.setAttribute('keycloak.env_group.ro.members.removed', roRemoved) span?.setAttribute('keycloak.env_group.rw.members.removed', rwRemoved) + this.logger.log(`Orphan Keycloak environment group member cleanup completed: project=${project.slug} env=${environment.name} roRemoved=${roRemoved} rwRemoved=${rwRemoved}`) } @StartActiveSpan() @@ -493,6 +506,7 @@ export class KeycloakService { span?.setAttribute('project.slug', project.slug) span?.setAttribute('keycloak.group.id', group.id) span?.setAttribute('keycloak.group.name', group.name) + this.logger.verbose(`Scanning Keycloak environment groups for orphan cleanup: project=${project.slug} groupId=${group.id} groupName=${group.name}`) const envGroups = map(this.keycloak.getSubGroups(group.id), envGroup => z.object({ id: z.string(), @@ -512,16 +526,17 @@ export class KeycloakService { if (this.isEnvironmentGroup(subGroups) && !this.isOwnedEnvironmentGroup(project, envGroup)) { orphanCount++ - this.logger.log(`Deleting orphan environment group ${envGroup.name} for project ${project.slug}`) + this.logger.log(`Deleting orphan Keycloak environment group: project=${project.slug} envGroupId=${envGroup.id} envGroupName=${envGroup.name}`) promises.push( this.keycloak.deleteGroup(envGroup.id) - .catch(e => this.logger.warn(`Failed to delete environment group ${envGroup.name} for project ${project.slug}`, e)), + .catch(err => this.logger.warn(`Failed to delete orphan Keycloak environment group: project=${project.slug} envGroupId=${envGroup.id} envGroupName=${envGroup.name} err=${err instanceof Error ? 
err.message : String(err)}`)), ) } } span?.setAttribute('keycloak.env_groups.orphan.count', orphanCount) await Promise.all(promises) + this.logger.log(`Orphan Keycloak environment group cleanup completed: project=${project.slug} groupId=${group.id} orphanCount=${orphanCount}`) } private isEnvironmentGroup( diff --git a/apps/server-nestjs/src/modules/vault/vault-client.service.spec.ts b/apps/server-nestjs/src/modules/vault/vault-client.service.spec.ts new file mode 100644 index 0000000000..08e2949fb4 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault-client.service.spec.ts @@ -0,0 +1,122 @@ +import { Test } from '@nestjs/testing' +import { http, HttpResponse } from 'msw' +import { setupServer } from 'msw/node' +import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from 'vitest' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { VaultClientService } from './vault-client.service' +import { VaultError, VaultHttpClientService } from './vault-http-client.service' + +const vaultUrl = 'https://vault.internal' + +const server = setupServer( + http.post(`${vaultUrl}/v1/auth/token/create`, () => { + return HttpResponse.json({ auth: { client_token: 'token' } }) + }), + http.get(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({ data: { data: { secret: 'value' }, metadata: { created_time: '2023-01-01T00:00:00.000Z', version: 1 } } }) + }), + http.post(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({}) + }), + http.delete(`${vaultUrl}/v1/kv/metadata/:path`, () => { + return new HttpResponse(null, { status: 204 }) + }), +) + +function createVaultServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + VaultClientService, + VaultHttpClientService, + { + provide: ConfigurationService, + useValue: { + vaultToken: 'token', + vaultUrl, + vaultInternalUrl: vaultUrl, + vaultKvName: 'kv', + getInternalOrPublicVaultUrl: () => 
vaultUrl, + } satisfies Partial, + }, + ], + }) +} + +describe('vault', () => { + let service: VaultClientService + + beforeAll(() => server.listen()) + beforeEach(async () => { + const module = await createVaultServiceTestingModule().compile() + service = module.get(VaultClientService) + }) + afterEach(() => server.resetHandlers()) + afterAll(() => server.close()) + + describe('getProjectValues', () => { + it('should get project values', async () => { + const result = await service.readProjectValues('project-id') + expect(result).toEqual({ secret: 'value' }) + }) + + it('should return empty object if undefined', async () => { + server.use( + http.get(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({}, { status: 404 }) + }), + ) + + const result = await service.readProjectValues('project-id') + expect(result).toEqual(undefined) + }) + }) + + describe('read', () => { + it('should read secret', async () => { + const result = await service.read('path') + expect(result).toEqual({ + data: { secret: 'value' }, + metadata: { created_time: '2023-01-01T00:00:00.000Z', version: 1 }, + }) + }) + + it('should throw if 404', async () => { + server.use( + http.get(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({}, { status: 404 }) + }), + ) + + await expect(service.read('path')).rejects.toBeInstanceOf(VaultError) + await expect(service.read('path')).rejects.toMatchObject({ kind: 'NotFound', status: 404 }) + }) + }) + + describe('write', () => { + it('should write secret', async () => { + await expect(service.write({ secret: 'value' }, 'path')).resolves.toBeUndefined() + }) + + it('should expose reasons on error', async () => { + server.use( + http.post(`${vaultUrl}/v1/kv/data/:path`, () => { + return HttpResponse.json({ errors: ['No secret engine mount at test-project/'] }, { status: 400 }) + }), + ) + + await expect(service.write({ secret: 'value' }, 'path')).rejects.toBeInstanceOf(VaultError) + await expect(service.write({ secret: 'value' 
}, 'path')).rejects.toMatchObject({ + kind: 'HttpError', + status: 400, + reasons: ['No secret engine mount at test-project/'], + }) + await expect(service.write({ secret: 'value' }, 'path')).rejects.toThrow('Request failed') + }) + }) + + describe('delete', () => { + it('should delete secret', async () => { + await expect(service.delete('path')).resolves.toBeUndefined() + }) + }) +}) diff --git a/apps/server-nestjs/src/modules/vault/vault-client.service.ts b/apps/server-nestjs/src/modules/vault/vault-client.service.ts new file mode 100644 index 0000000000..2b39e61f77 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault-client.service.ts @@ -0,0 +1,396 @@ +import type { VaultFetchOptions } from './vault-http-client.service' +import { Inject, Injectable, Logger } from '@nestjs/common' +import { trace } from '@opentelemetry/api' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { StartActiveSpan } from '../../cpin-module/infrastructure/telemetry/telemetry.decorator' +import { VaultError, VaultHttpClientService } from './vault-http-client.service' +import { generateGitlabMirrorCredPath, generateProjectPath, generateTechReadOnlyCredPath } from './vault.utils' + +interface VaultSysPoliciesAclUpsertRequest { + policy: string +} + +interface VaultSysMountCreateRequest { + type: string + config: { + force_no_cache: boolean + } + options: { + version: number + } +} + +interface VaultSysMountTuneRequest { + options: { + version: number + } +} + +interface VaultAuthApproleRoleUpsertRequest { + secret_id_num_uses: string + secret_id_ttl: string + token_max_ttl: string + token_num_uses: string + token_ttl: string + token_type: string + token_policies: string[] +} + +interface VaultIdentityGroupUpsertRequest { + name: string + type: string + policies: string[] +} + +interface VaultIdentityGroupAliasCreateRequest { + name: string + mount_accessor: string + canonical_id: string +} + +interface 
VaultAuthMethod { + accessor: string + type: string + description?: string +} + +interface VaultSysAuthResponse { + data: Record +} + +interface VaultIdentityGroupResponse { + data: { + id: string + name: string + alias?: { + id?: string + name?: string + } + } +} + +export interface VaultMetadata { + created_time: string + custom_metadata: Record | null + deletion_time: string + destroyed: boolean + version: number +} + +export interface VaultSecret { + data: T + metadata: VaultMetadata +} + +export interface VaultResponse { + data: VaultSecret +} + +interface VaultListResponse { + data: { + keys: string[] + } +} + +interface VaultRoleIdResponse { + data: { + role_id: string + } +} + +interface VaultSecretIdResponse { + data: { + secret_id: string + } +} + +@Injectable() +export class VaultClientService { + private readonly logger = new Logger(VaultClientService.name) + + constructor( + @Inject(ConfigurationService) private readonly config: ConfigurationService, + @Inject(VaultHttpClientService) private readonly http: VaultHttpClientService, + ) { + } + + @StartActiveSpan() + private async fetch( + path: string, + options: VaultFetchOptions = {}, + ): Promise { + return this.http.fetch(path, options) + } + + @StartActiveSpan() + async getKvData(kvName: string, path: string): Promise> { + const span = trace.getActiveSpan() + span?.setAttribute('vault.kv.name', kvName) + span?.setAttribute('vault.kv.path', path) + this.logger.verbose(`Reading Vault KV data (kvName=${kvName}, path=${path})`) + const response = await this.fetch>(`/v1/${kvName}/data/${path}`, { method: 'GET' }) + if (!response?.data) { + throw new VaultError('InvalidResponse', 'Missing "data" field', { method: 'GET', path: `/v1/${kvName}/data/${path}` }) + } + return response.data + } + + @StartActiveSpan() + async upsertKvData(kvName: string, path: string, body: { data: T }): Promise { + const span = trace.getActiveSpan() + span?.setAttribute('vault.kv.name', kvName) + 
span?.setAttribute('vault.kv.path', path) + this.logger.verbose(`Writing Vault KV data (kvName=${kvName}, path=${path})`) + await this.fetch(`/v1/${kvName}/data/${path}`, { method: 'POST', body }) + } + + @StartActiveSpan() + async read(path: string): Promise> { + this.logger.debug(`Reading Vault KV secret at ${path}`) + return await this.getKvData(this.config.vaultKvName, path) + } + + @StartActiveSpan() + async write(data: T, path: string): Promise { + this.logger.debug(`Writing Vault KV secret at ${path}`) + await this.upsertKvData(this.config.vaultKvName, path, { data }) + } + + @StartActiveSpan() + async delete(path: string): Promise { + this.logger.debug(`Deleting Vault KV secret at ${path}`) + const span = trace.getActiveSpan() + span?.setAttribute('vault.kv.path', path) + return await this.deleteKvMetadata(this.config.vaultKvName, path) + } + + @StartActiveSpan() + async readProjectValues(projectId: string): Promise | undefined> { + const path = generateProjectPath(this.config.projectRootDir, projectId) + this.logger.debug(`Reading Vault project values (projectId=${projectId}, path=${path})`) + this.logger.verbose(`Reading Vault project values for projectId=${projectId}`) + const secret = await this.read>(path).catch((error) => { + if (error instanceof VaultError && error.kind === 'NotFound') return null + throw error + }) + return secret?.data + } + + @StartActiveSpan() + async readGitlabMirrorCreds(projectSlug: string, repoName: string): Promise { + const vaultCredsPath = generateGitlabMirrorCredPath(this.config.projectRootDir, projectSlug, repoName) + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': projectSlug, + 'repo.name': repoName, + 'vault.kv.path': vaultCredsPath, + }) + this.logger.verbose(`Reading Vault GitLab mirror credentials (projectSlug=${projectSlug}, repoName=${repoName})`) + return await this.read(vaultCredsPath).catch((error) => { + if (error instanceof VaultError && error.kind === 'NotFound') return null + 
throw error + }) + } + + @StartActiveSpan() + async writeGitlabMirrorCreds(projectSlug: string, repoName: string, data: Record): Promise { + const vaultCredsPath = generateGitlabMirrorCredPath(this.config.projectRootDir, projectSlug, repoName) + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': projectSlug, + 'repo.name': repoName, + 'vault.kv.path': vaultCredsPath, + }) + this.logger.verbose(`Writing Vault GitLab mirror credentials (projectSlug=${projectSlug}, repoName=${repoName})`) + await this.write(data, vaultCredsPath) + } + + @StartActiveSpan() + async deleteGitlabMirrorCreds(projectSlug: string, repoName: string): Promise { + const vaultCredsPath = generateGitlabMirrorCredPath(this.config.projectRootDir, projectSlug, repoName) + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': projectSlug, + 'repo.name': repoName, + 'vault.kv.path': vaultCredsPath, + }) + this.logger.verbose(`Deleting Vault GitLab mirror credentials (projectSlug=${projectSlug}, repoName=${repoName})`) + await this.delete(vaultCredsPath).catch((error) => { + if (error instanceof VaultError && error.kind === 'NotFound') return + throw error + }) + } + + @StartActiveSpan() + async readTechnReadOnlyCreds(projectSlug: string): Promise { + const vaultPath = generateTechReadOnlyCredPath(this.config.projectRootDir, projectSlug) + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': projectSlug, + 'vault.kv.path': vaultPath, + }) + return await this.read(vaultPath).catch((error) => { + if (error instanceof VaultError && error.kind === 'NotFound') return null + throw error + }) + } + + @StartActiveSpan() + async writeTechReadOnlyCreds(projectSlug: string, creds: Record): Promise { + const vaultPath = generateTechReadOnlyCredPath(this.config.projectRootDir, projectSlug) + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': projectSlug, + 'vault.kv.path': vaultPath, + }) + await this.write(creds, 
vaultPath) + } + + @StartActiveSpan() + async writeMirrorTriggerToken(secret: Record): Promise { + const span = trace.getActiveSpan() + span?.setAttribute('vault.kv.path', 'GITLAB') + await this.write(secret, 'GITLAB') + } + + @StartActiveSpan() + async deleteKvMetadata(kvName: string, path: string): Promise { + const span = trace.getActiveSpan() + span?.setAttribute('vault.kv.name', kvName) + span?.setAttribute('vault.kv.path', path) + try { + await this.fetch(`/v1/${kvName}/metadata/${path}`, { method: 'DELETE' }) + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return + throw error + } + } + + @StartActiveSpan() + async listKvMetadata(kvName: string, path: string): Promise { + try { + const span = trace.getActiveSpan() + span?.setAttribute('vault.kv.name', kvName) + span?.setAttribute('vault.kv.path', path) + this.logger.verbose(`Listing Vault KV metadata (kvName=${kvName}, path=${path})`) + const response = await this.fetch(`/v1/${kvName}/metadata/${path}`, { method: 'LIST' }) + if (!response?.data?.keys) { + throw new VaultError('InvalidResponse', 'Missing "data.keys" field', { method: 'LIST', path: `/v1/${kvName}/metadata/${path}` }) + } + return response.data.keys + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return [] + throw error + } + } + + @StartActiveSpan() + async upsertSysPoliciesAcl(policyName: string, body: VaultSysPoliciesAclUpsertRequest): Promise { + this.logger.verbose(`Upserting Vault ACL policy ${policyName}`) + await this.fetch(`/v1/sys/policies/acl/${policyName}`, { method: 'POST', body }) + } + + @StartActiveSpan() + async deleteSysPoliciesAcl(policyName: string): Promise { + this.logger.verbose(`Deleting Vault ACL policy ${policyName}`) + await this.fetch(`/v1/sys/policies/acl/${policyName}`, { method: 'DELETE' }) + } + + @StartActiveSpan() + async createSysMount(name: string, body: VaultSysMountCreateRequest): Promise { + this.logger.verbose(`Creating Vault mount 
${name} (version=${body.options.version})`) + await this.fetch(`/v1/sys/mounts/${name}`, { method: 'POST', body }) + } + + @StartActiveSpan() + async tuneSysMount(name: string, body: VaultSysMountTuneRequest): Promise { + this.logger.verbose(`Tuning Vault mount ${name} (version=${body.options.version})`) + await this.fetch(`/v1/sys/mounts/${name}/tune`, { method: 'POST', body }) + } + + @StartActiveSpan() + async deleteSysMounts(name: string): Promise { + this.logger.verbose(`Deleting Vault mount ${name}`) + await this.fetch(`/v1/sys/mounts/${name}`, { method: 'DELETE' }) + } + + @StartActiveSpan() + async upsertAuthApproleRole(roleName: string, body: VaultAuthApproleRoleUpsertRequest): Promise { + this.logger.verbose(`Upserting Vault AppRole ${roleName} (policies=${body.token_policies.length})`) + await this.fetch(`/v1/auth/approle/role/${roleName}`, { + method: 'POST', + body, + }) + } + + @StartActiveSpan() + async deleteAuthApproleRole(roleName: string): Promise { + this.logger.verbose(`Deleting Vault AppRole ${roleName}`) + await this.fetch(`/v1/auth/approle/role/${roleName}`, { method: 'DELETE' }) + } + + async getAuthApproleRoleRoleId(roleName: string): Promise { + const path = `/v1/auth/approle/role/${roleName}/role-id` + this.logger.verbose(`Reading Vault AppRole role-id for ${roleName}`) + const response = await this.fetch(path, { method: 'GET' }) + const roleId = response?.data?.role_id + if (!roleId) { + throw new VaultError('InvalidResponse', `Vault role-id not found for role ${roleName}`, { method: 'GET', path }) + } + return roleId + } + + @StartActiveSpan() + async createAuthApproleRoleSecretId(roleName: string): Promise { + const path = `/v1/auth/approle/role/${roleName}/secret-id` + this.logger.verbose(`Creating Vault AppRole secret-id for ${roleName}`) + const response = await this.fetch(path, { method: 'POST' }) + const secretId = response?.data?.secret_id + if (!secretId) { + throw new VaultError('InvalidResponse', `Vault secret-id not 
generated for role ${roleName}`, { method: 'POST', path }) + } + return secretId + } + + async getSysAuth(): Promise> { + const path = '/v1/sys/auth' + this.logger.verbose('Listing Vault auth methods') + const response = await this.fetch(path, { method: 'GET' }) + return response?.data ?? {} + } + + @StartActiveSpan() + async upsertIdentityGroupName(groupName: string, body: VaultIdentityGroupUpsertRequest): Promise { + this.logger.verbose(`Upserting Vault identity group ${groupName} (policies=${body.policies.length})`) + await this.fetch(`/v1/identity/group/name/${groupName}`, { + method: 'POST', + body, + }) + } + + @StartActiveSpan() + async getIdentityGroupName(groupName: string): Promise { + const span = trace.getActiveSpan() + span?.setAttribute('vault.identity.group.name', groupName) + const path = `/v1/identity/group/name/${groupName}` + const response = await this.fetch(path, { method: 'GET' }) + if (!response) throw new VaultError('InvalidResponse', 'Empty response', { method: 'GET', path }) + return response + } + + @StartActiveSpan() + async deleteIdentityGroupName(groupName: string): Promise { + const span = trace.getActiveSpan() + span?.setAttribute('vault.identity.group.name', groupName) + this.logger.verbose(`Deleting Vault identity group ${groupName}`) + await this.fetch(`/v1/identity/group/name/${groupName}`, { method: 'DELETE' }) + } + + @StartActiveSpan() + async createIdentityGroupAlias(body: VaultIdentityGroupAliasCreateRequest): Promise { + const span = trace.getActiveSpan() + span?.setAttribute('vault.identity.group.alias', body.name) + this.logger.verbose(`Creating Vault identity group alias (aliasName=${body.name}, canonicalId=${body.canonical_id})`) + await this.fetch('/v1/identity/group-alias', { method: 'POST', body }) + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault-datastore.service.ts b/apps/server-nestjs/src/modules/vault/vault-datastore.service.ts new file mode 100644 index 0000000000..932b4bbba9 --- /dev/null +++ 
b/apps/server-nestjs/src/modules/vault/vault-datastore.service.ts @@ -0,0 +1,58 @@ +import type { Prisma } from '@prisma/client' +import { Inject, Injectable } from '@nestjs/common' +import { PrismaService } from '../../cpin-module/infrastructure/database/prisma.service' + +export const projectSelect = { + id: true, + name: true, + slug: true, + description: true, + environments: { + select: { + id: true, + name: true, + clusterId: true, + cpu: true, + gpu: true, + memory: true, + autosync: true, + }, + }, +} satisfies Prisma.ProjectSelect + +export type ProjectWithDetails = Prisma.ProjectGetPayload<{ + select: typeof projectSelect +}> + +export const zoneSelect = { + id: true, + slug: true, +} satisfies Prisma.ZoneSelect + +export type ZoneWithDetails = Prisma.ZoneGetPayload<{ + select: typeof zoneSelect +}> + +@Injectable() +export class VaultDatastoreService { + constructor(@Inject(PrismaService) private readonly prisma: PrismaService) {} + + async getAllProjects(): Promise { + return this.prisma.project.findMany({ + select: projectSelect, + }) + } + + async getProject(id: string): Promise { + return this.prisma.project.findUnique({ + where: { id }, + select: projectSelect, + }) + } + + async getAllZones(): Promise { + return this.prisma.zone.findMany({ + select: zoneSelect, + }) + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault-health.service.ts b/apps/server-nestjs/src/modules/vault/vault-health.service.ts new file mode 100644 index 0000000000..33b01edbf9 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault-health.service.ts @@ -0,0 +1,26 @@ +import { Inject, Injectable } from '@nestjs/common' +import { HealthIndicatorService } from '@nestjs/terminus' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' + +@Injectable() +export class VaultHealthService { + constructor( + @Inject(ConfigurationService) private readonly config: ConfigurationService, + @Inject(HealthIndicatorService) 
private readonly healthIndicator: HealthIndicatorService, + ) {} + + async check(key: string) { + const indicator = this.healthIndicator.check(key) + const urlBase = this.config.getInternalOrPublicVaultUrl() + if (!urlBase) return indicator.down('Not configured') + + const url = new URL('/v1/sys/health', urlBase).toString() + try { + const response = await fetch(url) + if (response.status < 500) return indicator.up({ httpStatus: response.status }) + return indicator.down({ httpStatus: response.status }) + } catch (error) { + return indicator.down(error instanceof Error ? error.message : String(error)) + } + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault-http-client.service.ts b/apps/server-nestjs/src/modules/vault/vault-http-client.service.ts new file mode 100644 index 0000000000..28040fbe30 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault-http-client.service.ts @@ -0,0 +1,146 @@ +import { Inject, Injectable, Logger } from '@nestjs/common' +import { trace } from '@opentelemetry/api' +import z from 'zod' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { StartActiveSpan } from '../../cpin-module/infrastructure/telemetry/telemetry.decorator' + +export interface VaultFetchOptions { + method?: string + body?: unknown +} + +export type VaultErrorKind + = | 'NotConfigured' + | 'NotFound' + | 'HttpError' + | 'InvalidResponse' + | 'ParseError' + | 'Unexpected' + +export class VaultError extends Error { + readonly kind: VaultErrorKind + readonly status?: number + readonly method?: string + readonly path?: string + readonly statusText?: string + readonly reasons?: string[] + + constructor( + kind: VaultErrorKind, + message: string, + details: { status?: number, method?: string, path?: string, statusText?: string, reasons?: string[] } = {}, + ) { + super(message) + this.name = 'VaultError' + this.kind = kind + this.status = details.status + this.method = details.method + this.path 
= details.path + this.statusText = details.statusText + this.reasons = details.reasons + } +} + +@Injectable() +export class VaultHttpClientService { + private readonly logger = new Logger(VaultHttpClientService.name) + + constructor( + @Inject(ConfigurationService) private readonly config: ConfigurationService, + ) {} + + @StartActiveSpan() + async fetch( + path: string, + options: VaultFetchOptions = {}, + ): Promise { + const span = trace.getActiveSpan() + const method = options.method ?? 'GET' + span?.setAttribute('vault.method', method) + span?.setAttribute('vault.path', path) + + this.logger.verbose(`Starting Vault request: ${method} ${path}`) + this.logger.debug(`Vault request: ${method} ${path}`) + const request = this.createRequest(path, method, options.body) + const response = await fetch(request).catch((error) => { + if (error instanceof Error) { + this.logger.error(`Vault request failed: ${method} ${path}: ${error.message}`, error.stack) + } else { + this.logger.error(`Vault request failed: ${method} ${path}: ${String(error)}`) + } + throw new VaultError( + 'Unexpected', + error instanceof Error ? error.message : String(error), + { method, path }, + ) + }) + span?.setAttribute('vault.http.status', response.status) + this.logger.debug(`Vault response: ${method} ${path} status=${response.status}`) + + const parsed = await this.handleResponse(response, method, path) + this.logger.verbose(`Completed Vault request: ${method} ${path} status=${response.status} outcome=${parsed === null ? 
'no-content' : 'ok'}`) + return parsed + } + + private get baseUrl() { + const baseUrl = this.config.getInternalOrPublicVaultUrl() + if (!baseUrl) { + throw new VaultError('NotConfigured', 'VAULT_INTERNAL_URL or VAULT_URL is required') + } + return baseUrl + } + + private get token() { + if (!this.config.vaultToken) { + this.logger.warn('Vault token is not configured (VAULT_TOKEN is missing)') + throw new VaultError('NotConfigured', 'VAULT_TOKEN is required') + } + return this.config.vaultToken + } + + private createRequest(path: string, method: string, body?: unknown): Request { + const url = new URL(path, this.baseUrl).toString() + const headers: Record = { + 'Content-Type': 'application/json', + 'X-Vault-Token': this.token, + } + + return new Request(url, { method, headers, body: body === undefined ? undefined : JSON.stringify(body) }) + } + + private async handleResponse(response: Response, method: string, path: string): Promise { + if (response.status === 204) return null + + if (!response.ok) { + await this.throwForStatus(response, method, path) + } + + return await response.json() + } + + private async throwForStatus(response: Response, method: string, path: string): Promise { + const responseBody = await response.json() + const vaultErrorBody = z.object({ errors: z.array(z.string()) }).safeParse(responseBody) + const reasons = vaultErrorBody.success ? vaultErrorBody.data.errors : undefined + const reasonsPart = reasons?.length ? 
` reasons=${reasons.join('; ')}` : '' + this.logger.warn(`Vault request returned error: ${method} ${path} status=${response.status} statusText=${response.statusText}${reasonsPart}`) + + if (response.status === 404) { + throw new VaultError('NotFound', 'Not Found', { + status: 404, + method, + path, + statusText: response.statusText, + reasons, + }) + } + + throw new VaultError('HttpError', 'Request failed', { + status: response.status, + method, + path, + statusText: response.statusText, + reasons, + }) + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault.module.ts b/apps/server-nestjs/src/modules/vault/vault.module.ts new file mode 100644 index 0000000000..8281fb16ce --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.module.ts @@ -0,0 +1,23 @@ +import { Module } from '@nestjs/common' +import { HealthIndicatorService } from '@nestjs/terminus' +import { ConfigurationModule } from '../../cpin-module/infrastructure/configuration/configuration.module' +import { InfrastructureModule } from '../../cpin-module/infrastructure/infrastructure.module' +import { VaultClientService } from './vault-client.service' +import { VaultDatastoreService } from './vault-datastore.service' +import { VaultHealthService } from './vault-health.service' +import { VaultHttpClientService } from './vault-http-client.service' +import { VaultService } from './vault.service' + +@Module({ + imports: [ConfigurationModule, InfrastructureModule], + providers: [ + HealthIndicatorService, + VaultHealthService, + VaultHttpClientService, + VaultClientService, + VaultService, + VaultDatastoreService, + ], + exports: [VaultClientService, VaultHealthService], +}) +export class VaultModule {} diff --git a/apps/server-nestjs/src/modules/vault/vault.service.spec.ts b/apps/server-nestjs/src/modules/vault/vault.service.spec.ts new file mode 100644 index 0000000000..dd9bfcc35a --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.service.spec.ts @@ -0,0 +1,94 @@ +import type { 
TestingModule } from '@nestjs/testing' +import type { Mocked } from 'vitest' +import { Test } from '@nestjs/testing' +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { VaultClientService } from './vault-client.service' +import { VaultDatastoreService } from './vault-datastore.service' +import { VaultService } from './vault.service' + +function createVaultControllerServiceTestingModule() { + return Test.createTestingModule({ + providers: [ + VaultService, + { + provide: VaultClientService, + useValue: { + createSysMount: vi.fn().mockResolvedValue(undefined), + tuneSysMount: vi.fn().mockResolvedValue(undefined), + deleteSysMounts: vi.fn().mockResolvedValue(undefined), + upsertSysPoliciesAcl: vi.fn().mockResolvedValue(undefined), + deleteSysPoliciesAcl: vi.fn().mockResolvedValue(undefined), + upsertAuthApproleRole: vi.fn().mockResolvedValue(undefined), + deleteAuthApproleRole: vi.fn().mockResolvedValue(undefined), + upsertIdentityGroupName: vi.fn().mockResolvedValue(undefined), + getIdentityGroupName: vi.fn().mockResolvedValue({ data: { id: 'gid', name: 'p1', alias: { name: '/p1' } } }), + deleteIdentityGroupName: vi.fn().mockResolvedValue(undefined), + getSysAuth: vi.fn().mockResolvedValue({ 'oidc/': { accessor: 'oidc-accessor', type: 'oidc' } }), + createIdentityGroupAlias: vi.fn().mockResolvedValue(undefined), + listKvMetadata: vi.fn().mockResolvedValue([]), + delete: vi.fn().mockResolvedValue(undefined), + } satisfies Partial, + }, + { + provide: VaultDatastoreService, + useValue: { + getAllProjects: vi.fn(), + getAllZones: vi.fn(), + } satisfies Partial, + }, + { + provide: ConfigurationService, + useValue: { + projectRootDir: 'forge', + vaultKvName: 'kv', + } satisfies Partial, + }, + ], + }) +} + +describe('vaultService', () => { + let service: VaultService + let datastore: Mocked + let client: Mocked + + beforeEach(async () => { + 
const module: TestingModule = await createVaultControllerServiceTestingModule().compile() + service = module.get(VaultService) + datastore = module.get(VaultDatastoreService) + client = module.get(VaultClientService) + }) + + it('should be defined', () => { + expect(service).toBeDefined() + }) + it('should reconcile on cron', async () => { + datastore.getAllProjects.mockResolvedValue([{ slug: 'p1' }, { slug: 'p2' }] as any) + datastore.getAllZones.mockResolvedValue([{ slug: 'z1' }] as any) + + await service.handleCron() + + expect(datastore.getAllProjects).toHaveBeenCalled() + expect(datastore.getAllZones).toHaveBeenCalled() + expect(client.createSysMount).toHaveBeenCalledTimes(3) + expect(client.createSysMount).toHaveBeenCalledWith('p1', expect.any(Object)) + expect(client.createSysMount).toHaveBeenCalledWith('p2', expect.any(Object)) + expect(client.createSysMount).toHaveBeenCalledWith('zone-z1', expect.any(Object)) + }) + + it('should upsert project on event', async () => { + await service.handleUpsert({ slug: 'p1' } as any) + expect(client.createSysMount).toHaveBeenCalledWith('p1', expect.any(Object)) + }) + + it('should delete project and destroy secrets on event', async () => { + client.listKvMetadata.mockResolvedValue([]) + await service.handleDelete({ slug: 'p1' } as any) + expect(client.deleteSysMounts).toHaveBeenCalledWith('p1') + expect(client.deleteSysPoliciesAcl).toHaveBeenCalledWith('app--p1--admin') + expect(client.deleteSysPoliciesAcl).toHaveBeenCalledWith('tech--p1--ro') + expect(client.deleteAuthApproleRole).toHaveBeenCalledWith('p1') + expect(client.deleteIdentityGroupName).toHaveBeenCalledWith('p1') + }) +}) diff --git a/apps/server-nestjs/src/modules/vault/vault.service.ts b/apps/server-nestjs/src/modules/vault/vault.service.ts new file mode 100644 index 0000000000..bef9d240e1 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.service.ts @@ -0,0 +1,372 @@ +import type { ProjectWithDetails, ZoneWithDetails } from 
'./vault-datastore.service' +import { Inject, Injectable, Logger } from '@nestjs/common' +import { OnEvent } from '@nestjs/event-emitter' +import { Cron, CronExpression } from '@nestjs/schedule' +import { trace } from '@opentelemetry/api' +import { ConfigurationService } from '../../cpin-module/infrastructure/configuration/configuration.service' +import { StartActiveSpan } from '../../cpin-module/infrastructure/telemetry/telemetry.decorator' +import { VaultClientService } from './vault-client.service' +import { VaultDatastoreService } from './vault-datastore.service' +import { VaultError } from './vault-http-client.service' +import { generateProjectPath } from './vault.utils' + +@Injectable() +export class VaultService { + private readonly logger = new Logger(VaultService.name) + + constructor( + @Inject(ConfigurationService) private readonly config: ConfigurationService, + @Inject(VaultDatastoreService) private readonly vaultDatastore: VaultDatastoreService, + @Inject(VaultClientService) private readonly client: VaultClientService, + ) { + this.logger.log('VaultService initialized') + } + + @OnEvent('project.upsert') + @StartActiveSpan() + async handleUpsert(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.log(`Handling a project upsert event for ${project.slug}`) + await this.ensureProject(project) + this.logger.log(`Vault project sync completed for ${project.slug}`) + } + + @OnEvent('project.delete') + @StartActiveSpan() + async handleDelete(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.log(`Handling a project delete event for ${project.slug}`) + await Promise.all([ + this.deleteProject(project.slug), + this.deleteProjectSecrets(project.slug), + ]) + this.logger.log(`Vault project cleanup completed for ${project.slug}`) + } + + @OnEvent('zone.upsert') + @StartActiveSpan() + async 
handleUpsertZone(zone: ZoneWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('zone.slug', zone.slug) + this.logger.log(`Handling a zone upsert event for ${zone.slug}`) + await this.ensureZone(zone) + this.logger.log(`Vault zone sync completed for ${zone.slug}`) + } + + @OnEvent('zone.delete') + @StartActiveSpan() + async handleDeleteZone(zone: ZoneWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('zone.slug', zone.slug) + this.logger.log(`Handling a zone delete event for ${zone.slug}`) + await this.deleteZone(zone.slug) + this.logger.log(`Vault zone cleanup completed for ${zone.slug}`) + } + + @Cron(CronExpression.EVERY_HOUR) + @StartActiveSpan() + async handleCron() { + const span = trace.getActiveSpan() + this.logger.log('Starting Vault reconciliation') + const [projects, zones] = await Promise.all([ + this.vaultDatastore.getAllProjects(), + this.vaultDatastore.getAllZones(), + ]) + + span?.setAttributes({ + 'vault.projects.count': projects.length, + 'vault.zones.count': zones.length, + }) + this.logger.log(`Loaded state for Vault reconciliation (projects=${projects.length}, zones=${zones.length})`) + await Promise.all([ + this.ensureProjects(projects), + this.ensureZones(zones), + ]) + this.logger.log(`Vault reconciliation completed (projects=${projects.length} zones=${zones.length})`) + } + + @StartActiveSpan() + private async ensureProjects(projects: ProjectWithDetails[]) { + const span = trace.getActiveSpan() + span?.setAttribute('vault.projects.count', projects.length) + this.logger.verbose(`Reconciling Vault projects (count=${projects.length})`) + await Promise.all(projects.map(p => this.ensureProject(p))) + } + + @StartActiveSpan() + private async ensureProject(project: ProjectWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + this.logger.verbose(`Reconciling Vault project ${project.slug}`) + await this.upsertProject(project) + } + + @StartActiveSpan() + 
private async ensureZones(zones: ZoneWithDetails[]) { + const span = trace.getActiveSpan() + span?.setAttribute('vault.zones.count', zones.length) + this.logger.verbose(`Reconciling Vault zones (count=${zones.length})`) + await Promise.all(zones.map(z => this.ensureZone(z))) + } + + @StartActiveSpan() + private async ensureZone(zone: ZoneWithDetails) { + const span = trace.getActiveSpan() + span?.setAttribute('zone.slug', zone.slug) + this.logger.verbose(`Reconciling Vault zone ${zone.slug}`) + await this.upsertZone(zone.slug) + } + + private async upsertMount(kvName: string): Promise<void> { + const createBody = { + type: 'kv', + config: { + force_no_cache: true, + }, + options: { + version: 2, + }, + } + const tuneBody = { + options: { + version: 2, + }, + } + try { + await this.client.createSysMount(kvName, createBody) + this.logger.log(`Created Vault mount ${kvName}`) + } catch (error) { + if (error instanceof VaultError && error.kind === 'HttpError' && error.status === 400) { + await this.client.tuneSysMount(kvName, tuneBody) + this.logger.log(`Vault mount ${kvName} already existed, so it was tuned to the expected settings`) + return + } + throw error + } + } + + private async deleteMount(kvName: string): Promise<void> { + try { + await this.client.deleteSysMounts(kvName) + this.logger.log(`Deleted Vault mount ${kvName}`) + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') { + this.logger.warn(`Vault mount ${kvName} was already missing`) + return + } + throw error + } + } + + @StartActiveSpan() + async upsertZone(zoneName: string): Promise<void> { + const kvName = generateZoneName(zoneName) + const span = trace.getActiveSpan() + span?.setAttribute('zone.name', zoneName) + span?.setAttribute('vault.kv.name', kvName) + const policyName = generateZoneTechReadOnlyPolicyName(zoneName) + + await this.upsertMount(kvName) + await this.client.upsertSysPoliciesAcl(policyName, { + policy: `path "${kvName}/*" { capabilities = ["read"] }`, + }) + await 
this.client.upsertAuthApproleRole(kvName, generateApproleRoleBody([policyName])) + } + + @StartActiveSpan() + async deleteZone(zoneName: string): Promise<void> { + const kvName = generateZoneName(zoneName) + const span = trace.getActiveSpan() + span?.setAttribute('zone.name', zoneName) + span?.setAttribute('vault.kv.name', kvName) + const policyName = generateZoneTechReadOnlyPolicyName(zoneName) + const roleName = kvName + + await this.deleteMount(kvName) + + const settled = await Promise.allSettled([ + this.client.deleteSysPoliciesAcl(policyName), + this.client.deleteAuthApproleRole(roleName), + ]) + + for (const result of settled) { + if (result.status !== 'rejected') continue + const error = result.reason + if (error instanceof VaultError && error.kind === 'NotFound') continue + throw error + } + } + + @StartActiveSpan() + async upsertProject(project: ProjectWithDetails): Promise<void> { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', project.slug) + span?.setAttribute('vault.kv.name', project.slug) + const appPolicyName = generateAppAdminPolicyName(project) + const techPolicyName = generateTechReadOnlyPolicyName(project) + await this.upsertMount(project.slug) + await Promise.all([ + this.createAppAdminPolicy(appPolicyName, project.slug), + this.createTechReadOnlyPolicy(techPolicyName, project.slug), + this.ensureProjectGroup(project.slug, appPolicyName), + this.client.upsertAuthApproleRole(project.slug, generateApproleRoleBody([techPolicyName, appPolicyName])), + ]) + } + + @StartActiveSpan() + async deleteProject(projectSlug: string): Promise<void> { + const span = trace.getActiveSpan() + span?.setAttribute('project.slug', projectSlug) + span?.setAttribute('vault.kv.name', projectSlug) + const appPolicyName = generateAppAdminPolicyName({ slug: projectSlug } as ProjectWithDetails) + const techPolicyName = generateTechReadOnlyPolicyName({ slug: projectSlug } as ProjectWithDetails) + + await this.deleteMount(projectSlug) + + const settled = await 
Promise.allSettled([ + this.client.deleteSysPoliciesAcl(appPolicyName), + this.client.deleteSysPoliciesAcl(techPolicyName), + this.client.deleteAuthApproleRole(projectSlug), + this.client.deleteIdentityGroupName(projectSlug), + ]) + for (const result of settled) { + if (result.status !== 'rejected') continue + const error = result.reason + if (error instanceof VaultError && error.kind === 'NotFound') continue + throw error + } + } + + @StartActiveSpan() + private async ensureProjectGroup(groupName: string, policyName: string): Promise<void> { + const span = trace.getActiveSpan() + span?.setAttributes({ + 'vault.group.name': groupName, + 'vault.policy.name': policyName, + }) + await this.client.upsertIdentityGroupName(groupName, { + name: groupName, + type: 'external', + policies: [policyName], + }) + + const groupResult = await this.client.getIdentityGroupName(groupName) + if (!groupResult.data?.id) { + throw new VaultError('InvalidResponse', `Vault group not found after upsert: ${groupName}`, { method: 'GET', path: `/v1/identity/group/name/${groupName}` }) + } + + const groupAliasName = `/${groupName}` + if (groupResult.data.alias?.name === groupAliasName) return + + const methods = await this.client.getSysAuth() + const oidc = methods['oidc/'] + if (!oidc?.accessor) { + throw new VaultError('InvalidResponse', 'Vault OIDC auth method not found (expected "oidc/")', { method: 'GET', path: '/v1/sys/auth' }) + } + try { + span?.setAttributes({ + 'vault.group.alias.name': groupAliasName, + 'vault.oidc.accessor': oidc.accessor, + }) + await this.client.createIdentityGroupAlias({ + name: groupAliasName, + mount_accessor: oidc.accessor, + canonical_id: groupResult.data.id, + }) + } catch (error) { + if (error instanceof VaultError && error.kind === 'HttpError' && error.status === 400) return + throw error + } + } + + async createAppAdminPolicy(name: string, projectSlug: string): Promise<void> { + await this.client.upsertSysPoliciesAcl(name, { + policy: `path "${projectSlug}/*" { 
capabilities = ["create", "read", "update", "delete", "list"] }`, + }) + } + + async createTechReadOnlyPolicy(name: string, projectSlug: string): Promise<void> { + await this.client.upsertSysPoliciesAcl(name, { + policy: `path "${this.config.vaultKvName}/data/${projectSlug}/REGISTRY/ro-robot" { capabilities = ["read"] }`, + }) + } + + async listProjectSecrets(projectSlug: string): Promise<string[]> { + const projectPath = generateProjectPath(this.config.projectRootDir, projectSlug) + return this.listRecursive(this.config.vaultKvName, projectPath, '') + } + + @StartActiveSpan() + async deleteProjectSecrets(projectSlug: string): Promise<void> { + const span = trace.getActiveSpan() + span?.setAttributes({ + 'project.slug': projectSlug, + 'vault.kv.name': this.config.vaultKvName, + }) + const secrets = await this.listProjectSecrets(projectSlug) + span?.setAttribute('vault.secrets.count', secrets.length) + + const projectPath = generateProjectPath(this.config.projectRootDir, projectSlug) + await Promise.allSettled(secrets.map(async (relativePath) => { + const fullPath = `${projectPath}/${relativePath}` + try { + await this.client.delete(fullPath) + } catch (error) { + if (error instanceof VaultError && error.kind === 'NotFound') return + throw error + } + })) + } + + private async listRecursive( + kvName: string, + basePath: string, + relativePath: string, + ): Promise<string[]> { + const combined = relativePath.length === 0 ? basePath : `${basePath}/${relativePath}` + const keys = await this.client.listKvMetadata(kvName, combined) + if (keys.length === 0) return [] + + const results: string[] = [] + for (const key of keys) { + if (key.endsWith('/')) { + const nestedRel = relativePath.length === 0 ? key.slice(0, -1) : `${relativePath}/${key.slice(0, -1)}` + const nested = await this.listRecursive(kvName, basePath, nestedRel) + results.push(...nested) + } else { + results.push(relativePath.length === 0 ? 
key : `${relativePath}/${key}`) + } + } + return results + } +} + +function generateTechReadOnlyPolicyName(project: ProjectWithDetails) { + return `tech--${project.slug}--ro` +} + +function generateAppAdminPolicyName(project: ProjectWithDetails) { + return `app--${project.slug}--admin` +} + +function generateZoneName(name: string) { + return `zone-${name}` +} + +function generateZoneTechReadOnlyPolicyName(zoneName: string) { + return `tech--${generateZoneName(zoneName)}--ro` +} + +function generateApproleRoleBody(policies: string[]) { + return { + secret_id_num_uses: '0', + secret_id_ttl: '0', + token_max_ttl: '0', + token_num_uses: '0', + token_ttl: '0', + token_type: 'batch', + token_policies: policies, + } +} diff --git a/apps/server-nestjs/src/modules/vault/vault.utils.ts b/apps/server-nestjs/src/modules/vault/vault.utils.ts new file mode 100644 index 0000000000..0606286188 --- /dev/null +++ b/apps/server-nestjs/src/modules/vault/vault.utils.ts @@ -0,0 +1,17 @@ +export function generateProjectPath(projectRootDir: string | undefined, projectSlug: string) { + return projectRootDir + ? `${projectRootDir}/${projectSlug}` + : projectSlug +} + +export function generateGitlabMirrorCredPath(projectRootDir: string | undefined, projectSlug: string, repoName: string) { + return projectRootDir + ? `${generateProjectPath(projectRootDir, projectSlug)}/${repoName}-mirror` + : `${projectSlug}/${repoName}-mirror` +} + +export function generateTechReadOnlyCredPath(projectRootDir: string | undefined, projectSlug: string) { + return projectRootDir + ? 
`${generateProjectPath(projectRootDir, projectSlug)}/tech/GITLAB_MIRROR` + : `${projectSlug}/tech/GITLAB_MIRROR` +} diff --git a/apps/server-nestjs/src/prisma/schema/project.prisma b/apps/server-nestjs/src/prisma/schema/project.prisma index 833845eee1..d45ccf451e 100644 --- a/apps/server-nestjs/src/prisma/schema/project.prisma +++ b/apps/server-nestjs/src/prisma/schema/project.prisma @@ -5,6 +5,7 @@ model Environment { memory Float @db.Real cpu Float @db.Real gpu Float @db.Real + autosync Boolean @default(true) createdAt DateTime @default(now()) updatedAt DateTime @updatedAt clusterId String @db.Uuid diff --git a/apps/server-nestjs/src/utils/iterable.ts b/apps/server-nestjs/src/utils/iterable.ts index 04b0a70cc3..a46ef80632 100644 --- a/apps/server-nestjs/src/utils/iterable.ts +++ b/apps/server-nestjs/src/utils/iterable.ts @@ -17,3 +17,10 @@ export async function getAll( } return items } + +export async function find<T>(generator: AsyncGenerator<T>, predicate: (item: T) => boolean): Promise<T | undefined> { + for await (const item of generator) { + if (predicate(item)) return item + } + return undefined +} diff --git a/apps/server-nestjs/test/argocd.e2e-spec.ts b/apps/server-nestjs/test/argocd.e2e-spec.ts new file mode 100644 index 0000000000..1c89e31590 --- /dev/null +++ b/apps/server-nestjs/test/argocd.e2e-spec.ts @@ -0,0 +1,308 @@ +import type { CommitAction, Gitlab } from '@gitbeaker/core' +import type { TestingModule } from '@nestjs/testing' +import { faker } from '@faker-js/faker' +import { Test } from '@nestjs/testing' +import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest' +import { parse } from 'yaml' +import { ConfigurationModule } from '../src/cpin-module/infrastructure/configuration/configuration.module' +import { ConfigurationService } from '../src/cpin-module/infrastructure/configuration/configuration.service' +import { PrismaService } from '../src/cpin-module/infrastructure/database/prisma.service' +import { InfrastructureModule } from 
'../src/cpin-module/infrastructure/infrastructure.module' +import { projectSelect } from '../src/modules/argocd/argocd-datastore.service' +import { ArgoCDModule } from '../src/modules/argocd/argocd.module' +import { ArgoCDService } from '../src/modules/argocd/argocd.service' +import { GITLAB_REST_CLIENT, GitlabClientService } from '../src/modules/gitlab/gitlab-client.service' +import { VaultClientService } from '../src/modules/vault/vault-client.service' + +const canRunArgoCDE2E + = Boolean(process.env.E2E) + && Boolean(process.env.GITLAB_URL) + && Boolean(process.env.GITLAB_TOKEN) + && Boolean(process.env.VAULT_URL) + && Boolean(process.env.VAULT_TOKEN) + && Boolean(process.env.PROJECTS_ROOT_DIR) + && Boolean(process.env.DB_URL) + +const describeWithArgoCD = describe.runIf(canRunArgoCDE2E) + +describeWithArgoCD('ArgoCDController (e2e)', {}, () => { + let moduleRef: TestingModule + let argocdController: ArgoCDService + let gitlab: GitlabClientService + let gitlabClient: Gitlab + let vault: VaultClientService + let prisma: PrismaService + let config: ConfigurationService + + let ownerId: string + let testProjectId: string + let testProjectSlug: string + + let zoneId: string + let zoneSlug: string + let kubeconfigId: string + let clusterId: string + let clusterLabel: string + let stageId: string + let envDevId: string + let envProdId: string + let envDevName: string + let envProdName: string + + let infraRepoId: number | undefined + let infraRepoPath: string + let vaultProjectValuesPath: string | undefined + + beforeAll(async () => { + moduleRef = await Test.createTestingModule({ + imports: [ArgoCDModule, ConfigurationModule, InfrastructureModule], + }).compile() + + await moduleRef.init() + + argocdController = moduleRef.get(ArgoCDService) + gitlab = moduleRef.get(GitlabClientService) + gitlabClient = moduleRef.get(GITLAB_REST_CLIENT) + vault = moduleRef.get(VaultClientService) + prisma = moduleRef.get(PrismaService) + config = moduleRef.get(ConfigurationService) + 
+ ownerId = faker.string.uuid() + testProjectId = faker.string.uuid() + testProjectSlug = faker.helpers.slugify(`test-project-${faker.string.uuid()}`) + + zoneId = faker.string.uuid() + zoneSlug = faker.string.alphanumeric({ length: 10 }).toLowerCase() + kubeconfigId = faker.string.uuid() + clusterId = faker.string.uuid() + clusterLabel = faker.helpers.slugify(`cluster-${faker.string.uuid()}`.slice(0, 40)) + stageId = faker.string.uuid() + envDevId = faker.string.uuid() + envProdId = faker.string.uuid() + envDevName = 'dev' + envProdName = 'prod' + + await prisma.user.create({ + data: { + id: ownerId, + email: faker.internet.email().toLowerCase(), + firstName: 'Test', + lastName: 'Owner', + type: 'human', + }, + }) + + await prisma.zone.create({ + data: { + id: zoneId, + slug: zoneSlug, + label: `Zone ${zoneSlug}`, + argocdUrl: 'https://example.com', + }, + }) + + await prisma.kubeconfig.create({ + data: { + id: kubeconfigId, + user: { token: faker.string.alphanumeric({ length: 16 }) }, + cluster: { server: 'https://example.com' }, + }, + }) + + await prisma.cluster.create({ + data: { + id: clusterId, + label: clusterLabel, + secretName: faker.string.uuid(), + kubeConfigId: kubeconfigId, + infos: null, + memory: 100, + cpu: 100, + gpu: 0, + zoneId, + }, + }) + + await prisma.stage.create({ + data: { + id: stageId, + name: faker.helpers.slugify(`stage-${faker.string.uuid()}`), + }, + }) + + await prisma.project.create({ + data: { + id: testProjectId, + slug: testProjectSlug, + name: testProjectSlug, + ownerId, + description: 'E2E Test Project', + hprodCpu: 0, + hprodGpu: 0, + hprodMemory: 0, + prodCpu: 0, + prodGpu: 0, + prodMemory: 0, + clusters: { + connect: { id: clusterId }, + }, + repositories: { + create: { + internalRepoName: zoneSlug, + isInfra: true, + deployRevision: 'HEAD', + deployPath: '.', + helmValuesFiles: '', + }, + }, + environments: { + create: [ + { + id: envDevId, + name: envDevName, + clusterId, + stageId, + cpu: 1, + gpu: 0, + memory: 1, + 
autosync: true, + }, + { + id: envProdId, + name: envProdName, + clusterId, + stageId, + cpu: 1, + gpu: 0, + memory: 1, + autosync: true, + }, + ], + }, + }, + }) + + infraRepoPath = `${config.projectRootDir}/infra/${zoneSlug}` + try { + const existing = await gitlabClient.Projects.show(infraRepoPath) + if (existing.empty_repo || existing.default_branch !== 'main') { + await gitlabClient.Projects.remove(existing.id).catch(() => {}) + throw new Error('Recreate infra repo') + } + infraRepoId = existing.id + } catch (error: any) { + const description = error?.cause?.description ?? '' + if ( + !(typeof description === 'string' && description.includes('404')) + && !(error instanceof Error && error.message === 'Recreate infra repo') + ) { + throw error + } + + const infraGroup = await gitlab.getOrCreateProjectSubGroup('infra') + const created = await gitlabClient.Projects.create({ + name: zoneSlug, + path: zoneSlug, + namespaceId: infraGroup.id, + initializeWithReadme: true, + defaultBranch: 'main', + } as any) + infraRepoId = created.id + } + + vaultProjectValuesPath = `${config.projectRootDir}/${testProjectId}` + await vault.write({ e2e: true }, vaultProjectValuesPath) + }) + + afterAll(async () => { + if (vaultProjectValuesPath) { + await vault.delete(vaultProjectValuesPath).catch(() => {}) + } + + if (infraRepoId) { + await gitlabClient.Projects.remove(infraRepoId).catch(() => {}) + } + + if (prisma) { + await prisma.project.deleteMany({ where: { id: testProjectId } }).catch(() => {}) + await prisma.stage.deleteMany({ where: { id: stageId } }).catch(() => {}) + await prisma.cluster.deleteMany({ where: { id: clusterId } }).catch(() => {}) + await prisma.kubeconfig.deleteMany({ where: { id: kubeconfigId } }).catch(() => {}) + await prisma.zone.deleteMany({ where: { id: zoneId } }).catch(() => {}) + await prisma.user.deleteMany({ where: { id: ownerId } }).catch(() => {}) + } + + await moduleRef.close() + + vi.restoreAllMocks() + vi.unstubAllEnvs() + }) + + it('should 
commit environment values and cleanup stale values in the zone infra repo', async () => { + const project = await prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: projectSelect, + }) + + const infraProject = await gitlab.getOrCreateInfraGroupRepo(zoneSlug) + infraRepoId = infraProject.id + + const staleFilePath = `${project.name}/${clusterLabel}/stale/values.yaml` + if (!infraRepoId) throw new Error('Missing infra repo id') + const staleAction = await gitlab.generateCreateOrUpdateAction(infraProject, 'main', staleFilePath, 'stale: true\n') + await gitlab.maybeCreateCommit(infraProject, 'ci: :robot_face: Seed stale values', staleAction ? [staleAction] : []) + + await argocdController.handleUpsert(project) + + const expectedFilePath = `${project.name}/${clusterLabel}/${envDevName}/values.yaml` + const file = await gitlabClient.RepositoryFiles.show(infraRepoId, expectedFilePath, 'main') + const raw = Buffer.from(file.content, 'base64').toString('utf8') + const values = parse(raw) + + expect(values?.common?.['dso/project.slug']).toBe(testProjectSlug) + expect(values?.common?.['dso/environment']).toBe(envDevName) + expect(values?.environment?.valueFilePath).toBe(expectedFilePath) + expect(values?.application?.destination?.name).toBe(clusterLabel) + expect(values?.application?.autosync).toBe(true) + + const shouldBeDeleted = await gitlab.getFile(infraProject, staleFilePath, 'main') + expect(shouldBeDeleted).toBeUndefined() + }, 144000) + + it('should update existing values and delete values of a removed environment', async () => { + const before = await prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: projectSelect, + }) + + const infraProject = await gitlab.getOrCreateInfraGroupRepo(zoneSlug) + infraRepoId = infraProject.id + + const devFilePath = `${before.name}/${clusterLabel}/${envDevName}/values.yaml` + const prodFilePath = `${before.name}/${clusterLabel}/${envProdName}/values.yaml` + + const seededActions = 
(await Promise.all([ + gitlab.generateCreateOrUpdateAction(infraProject, 'main', devFilePath, 'old: true\n'), + gitlab.generateCreateOrUpdateAction(infraProject, 'main', prodFilePath, 'old: true\n'), + ])).filter((action): action is NonNullable<typeof action> => action !== null) + await gitlab.maybeCreateCommit(infraProject, 'ci: :robot_face: Seed existing values', seededActions as CommitAction[]) + + await prisma.environment.deleteMany({ where: { id: envProdId } }) + + const after = await prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: projectSelect, + }) + + await argocdController.handleUpsert(after) + + const updatedDev = await gitlabClient.RepositoryFiles.show(infraRepoId, devFilePath, 'main') + const devRaw = Buffer.from(updatedDev.content, 'base64').toString('utf8') + const devValues = parse(devRaw) + expect(devValues?.common?.['dso/project.slug']).toBe(testProjectSlug) + expect(devValues?.common?.['dso/environment']).toBe(envDevName) + + const prodFile = await gitlab.getFile(infraProject, prodFilePath, 'main') + expect(prodFile).toBeUndefined() + }, 72000) +}) diff --git a/apps/server-nestjs/test/gitlab.e2e-spec.ts b/apps/server-nestjs/test/gitlab.e2e-spec.ts new file mode 100644 index 0000000000..f4585eda3d --- /dev/null +++ b/apps/server-nestjs/test/gitlab.e2e-spec.ts @@ -0,0 +1,223 @@ +import type { ExpandedUserSchema, Gitlab } from '@gitbeaker/core' +import type { TestingModule } from '@nestjs/testing' +import { faker } from '@faker-js/faker' +import { Test } from '@nestjs/testing' +import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest' +import z from 'zod' +import { ConfigurationModule } from '../src/cpin-module/infrastructure/configuration/configuration.module' +import { ConfigurationService } from '../src/cpin-module/infrastructure/configuration/configuration.service' +import { PrismaService } from '../src/cpin-module/infrastructure/database/prisma.service' +import { InfrastructureModule } from 
'../src/cpin-module/infrastructure/infrastructure.module' +import { GITLAB_REST_CLIENT, GitlabClientService } from '../src/modules/gitlab/gitlab-client.service' +import { projectSelect } from '../src/modules/gitlab/gitlab-datastore.service' +import { GitlabModule } from '../src/modules/gitlab/gitlab.module' +import { GitlabService } from '../src/modules/gitlab/gitlab.service' +import { VaultClientService } from '../src/modules/vault/vault-client.service' + +const canRunGitlabE2E + = Boolean(process.env.E2E) + && Boolean(process.env.GITLAB_URL) + && Boolean(process.env.GITLAB_TOKEN) + && Boolean(process.env.VAULT_URL) + && Boolean(process.env.VAULT_TOKEN) + && Boolean(process.env.PROJECTS_ROOT_DIR) + && Boolean(process.env.DB_URL) + +const describeWithGitLab = describe.runIf(canRunGitlabE2E) + +describeWithGitLab('GitlabController (e2e)', {}, () => { + let moduleRef: TestingModule + let gitlabController: GitlabService + let gitlabService: GitlabClientService + let gitlabClient: Gitlab + let vaultService: VaultClientService + let prisma: PrismaService + let config: ConfigurationService + + let testProjectId: string + let testProjectSlug: string + let ownerId: string + let ownerUser: ExpandedUserSchema + + beforeAll(async () => { + moduleRef = await Test.createTestingModule({ + imports: [GitlabModule, ConfigurationModule, InfrastructureModule], + }).compile() + + await moduleRef.init() + + gitlabController = moduleRef.get(GitlabService) + gitlabService = moduleRef.get(GitlabClientService) + gitlabClient = moduleRef.get(GITLAB_REST_CLIENT) + vaultService = moduleRef.get(VaultClientService) + prisma = moduleRef.get(PrismaService) + config = moduleRef.get(ConfigurationService) + + ownerId = faker.string.uuid() + testProjectId = faker.string.uuid() + testProjectSlug = faker.helpers.slugify(`test-project-${faker.string.uuid()}`) + + const ownerEmail = `test-owner-${ownerId}@example.com` + + // Create owner in GitLab + ownerUser = await gitlabClient.Users.create({ + name: 
'Test Owner', + password: faker.internet.password({ length: 24 }), + username: `test-owner-${ownerId}`, + email: ownerEmail, + skipConfirmation: true, + }) + + // Create owner in DB + await prisma.user.create({ + data: { + id: ownerId, + email: ownerUser.email.toLowerCase(), + firstName: 'Test', + lastName: 'Owner', + type: 'human', + }, + }) + }) + + afterAll(async () => { + // Clean GitLab group + if (testProjectSlug && config.projectRootDir) { + const fullPath = `${config.projectRootDir}/${testProjectSlug}` + const group = await gitlabService.getGroupByPath(fullPath) + if (group) { + await gitlabService.deleteGroup(group).catch(() => {}) + } + } + + // Clean Vault + if (testProjectSlug && config.projectRootDir) { + const vaultPath = `${config.projectRootDir}/${testProjectSlug}` + await vaultService.delete(`${vaultPath}/tech/GITLAB_MIRROR`).catch(() => {}) + await vaultService.delete(`${vaultPath}/app-mirror`).catch(() => {}) + } + + // Clean DB + if (prisma) { + await prisma.projectMembers.deleteMany({ where: { projectId: testProjectId } }).catch(() => {}) + await prisma.project.deleteMany({ where: { id: testProjectId } }).catch(() => {}) + await prisma.user.deleteMany({ where: { id: ownerId } }).catch(() => {}) + } + + await moduleRef.close() + + vi.restoreAllMocks() + vi.unstubAllEnvs() + }) + + it('should reconcile and create project group in GitLab and Vault secrets', async () => { + // Create Project in DB + await prisma.project.create({ + data: { + id: testProjectId, + slug: testProjectSlug, + name: testProjectSlug, + ownerId, + description: 'E2E Test Project', + hprodCpu: 0, + hprodGpu: 0, + hprodMemory: 0, + prodCpu: 0, + prodGpu: 0, + prodMemory: 0, + }, + }) + + await prisma.repository.create({ + data: { + projectId: testProjectId, + internalRepoName: 'app', + externalRepoUrl: 'https://example.com/example.git', + isPrivate: false, + }, + }) + + const project = await prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: 
projectSelect, + }) + + // Act + await gitlabController.handleUpsert(project) + + // Assert + const groupPath = `${config.projectRootDir}/${testProjectSlug}` + const group = z.object({ + id: z.number(), + name: z.string(), + full_path: z.string(), + web_url: z.string(), + }).parse(await gitlabService.getGroupByPath(groupPath)) + expect(group.full_path).toBe(groupPath) + + // Check membership + const members = await gitlabService.getGroupMembers(group) + const isMember = members.some(m => m.id === ownerUser.id) + expect(isMember).toBe(true) + + const repoVaultPath = `${config.projectRootDir}/${testProjectSlug}/app-mirror` + const repoSecret = await vaultService.read(repoVaultPath) + expect(repoSecret?.data?.GIT_OUTPUT_USER).toBeTruthy() + expect(repoSecret?.data?.GIT_OUTPUT_PASSWORD).toBeTruthy() + }, 72000) + + it('should add member to GitLab group when added in DB', async () => { + // Create user in GitLab + const newUserId = faker.string.uuid() + const newUser = await gitlabClient.Users.create({ + email: faker.internet.email().toLowerCase(), + username: faker.internet.username(), + name: `${faker.person.firstName()} ${faker.person.lastName()}`, + password: faker.internet.password({ length: 24 }), + skipConfirmation: true, + }) + + // Create user in DB + await prisma.user.create({ + data: { + id: newUserId, + email: newUser.email, + firstName: 'Test', + lastName: 'User', + type: 'human', + }, + }) + + // Add member to project in DB + await prisma.projectMembers.create({ + data: { + projectId: testProjectId, + userId: newUserId, + roleIds: [], // No roles for now + }, + }) + + const project = await prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: projectSelect, + }) + + // Act + await gitlabController.handleUpsert(project) + + // Assert + const groupPath = `${config.projectRootDir}/${testProjectSlug}` + const group = z.object({ + id: z.number(), + name: z.string(), + web_url: z.string(), + }).parse(await 
gitlabService.getGroupByPath(groupPath)) + + const members = await gitlabService.getGroupMembers(group) + const isNewMemberPresent = members.some(m => m.id === newUser.id) + expect(isNewMemberPresent).toBe(true) + + await prisma.projectMembers.deleteMany({ where: { userId: newUserId } }).catch(() => {}) + await prisma.user.delete({ where: { id: newUserId } }).catch(() => {}) + }, 72000) +}) diff --git a/apps/server-nestjs/test/vault.e2e-spec.ts b/apps/server-nestjs/test/vault.e2e-spec.ts new file mode 100644 index 0000000000..0fa5941c37 --- /dev/null +++ b/apps/server-nestjs/test/vault.e2e-spec.ts @@ -0,0 +1,102 @@ +import type { TestingModule } from '@nestjs/testing' +import { faker } from '@faker-js/faker' +import { Test } from '@nestjs/testing' +import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest' +import { ConfigurationModule } from '../src/cpin-module/infrastructure/configuration/configuration.module' +import { PrismaService } from '../src/cpin-module/infrastructure/database/prisma.service' +import { InfrastructureModule } from '../src/cpin-module/infrastructure/infrastructure.module' +import { VaultClientService } from '../src/modules/vault/vault-client.service' +import { projectSelect } from '../src/modules/vault/vault-datastore.service' +import { VaultModule } from '../src/modules/vault/vault.module' +import { VaultService } from '../src/modules/vault/vault.service' + +const canRunVaultE2E + = Boolean(process.env.E2E) + && Boolean(process.env.VAULT_URL) + && Boolean(process.env.VAULT_TOKEN) + && Boolean(process.env.DB_URL) + +const describeWithVault = describe.runIf(canRunVaultE2E) + +describeWithVault('VaultController (e2e)', () => { + let moduleRef: TestingModule + let vaultController: VaultService + let vaultClient: VaultClientService + let prisma: PrismaService + + let ownerId: string + let testProjectId: string + let testProjectSlug: string + + beforeAll(async () => { + moduleRef = await Test.createTestingModule({ + imports: 
[VaultModule, ConfigurationModule, InfrastructureModule], + }).compile() + + await moduleRef.init() + + vaultController = moduleRef.get(VaultService) + vaultClient = moduleRef.get(VaultClientService) + prisma = moduleRef.get(PrismaService) + + ownerId = faker.string.uuid() + testProjectId = faker.string.uuid() + testProjectSlug = faker.helpers.slugify(`test-project-${faker.string.uuid()}`) + + await prisma.user.create({ + data: { + id: ownerId, + email: faker.internet.email().toLowerCase(), + firstName: 'Test', + lastName: 'Owner', + type: 'human', + }, + }) + }) + + afterAll(async () => { + if (testProjectSlug) { + await vaultController.handleDelete({ slug: testProjectSlug } as any).catch(() => {}) + } + + if (prisma) { + await prisma.project.deleteMany({ where: { id: testProjectId } }).catch(() => {}) + await prisma.user.deleteMany({ where: { id: ownerId } }).catch(() => {}) + } + + await moduleRef.close() + + vi.restoreAllMocks() + vi.unstubAllEnvs() + }) + + it('should reconcile project in Vault (mount, group, role)', async () => { + await prisma.project.create({ + data: { + id: testProjectId, + slug: testProjectSlug, + name: testProjectSlug, + ownerId, + description: 'E2E Test Project', + hprodCpu: 0, + hprodGpu: 0, + hprodMemory: 0, + prodCpu: 0, + prodGpu: 0, + prodMemory: 0, + }, + }) + + const project = await prisma.project.findUniqueOrThrow({ + where: { id: testProjectId }, + select: projectSelect, + }) + + await vaultController.handleUpsert(project) + + const group = await vaultClient.getIdentityGroupName(testProjectSlug) + expect(group.data?.id).toBeTruthy() + expect(group.data?.name).toBe(testProjectSlug) + expect(group.data?.alias?.name).toBe(`/${testProjectSlug}`) + }, 180000) +}) diff --git a/apps/server-nestjs/vitest.config.ts b/apps/server-nestjs/vitest.config.ts index c5d6ab2bc3..b0337f6d0b 100644 --- a/apps/server-nestjs/vitest.config.ts +++ b/apps/server-nestjs/vitest.config.ts @@ -2,16 +2,18 @@ import path from 'node:path' import { 
defineConfig } from 'vitest/config' export default defineConfig({ + resolve: { + alias: [ + { find: '@cpn-console/logger/hooks', replacement: path.resolve(__dirname, '../../packages/logger/src/hooks.ts') }, + { find: '@cpn-console/logger', replacement: path.resolve(__dirname, '../../packages/logger/src/index.ts') }, + { find: '@cpn-console/shared', replacement: path.resolve(__dirname, '../../packages/shared/src/index.ts') }, + { find: '@cpn-console/hooks', replacement: path.resolve(__dirname, '../../packages/hooks/src/index.ts') }, + ], + }, test: { globals: true, environment: 'node', include: ['src/**/*.spec.ts', 'test/**/*.e2e-spec.ts'], - alias: { - '@cpn-console/shared': path.resolve(__dirname, '../../packages/shared/src/index.ts'), - '@cpn-console/hooks': path.resolve(__dirname, '../../packages/hooks/src/index.ts'), - '@cpn-console/logger': path.resolve(__dirname, '../../packages/logger/src/index.ts'), - '@cpn-console/logger/hooks': path.resolve(__dirname, '../../packages/logger/src/hooks.ts'), - }, coverage: { provider: 'v8', reporter: ['text', 'json', 'html'], diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index da9223813b..8e9e62a028 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -403,6 +403,9 @@ importers: '@gitbeaker/core': specifier: ^40.6.0 version: 40.6.0 + '@gitbeaker/requester-utils': + specifier: ^40.6.0 + version: 40.6.0 '@gitbeaker/rest': specifier: ^40.6.0 version: 40.6.0 @@ -511,6 +514,9 @@ importers: vitest-mock-extended: specifier: ^2.0.2 version: 2.0.2(typescript@5.9.3)(vitest@2.1.9(@types/node@22.19.15)(jsdom@25.0.1)(msw@2.12.10(@types/node@22.19.15)(typescript@5.9.3))(terser@5.46.0)) + yaml: + specifier: ^2.7.1 + version: 2.8.2 zod: specifier: ^3.25.76 version: 3.25.76