From ed1946aea90d15aecde2cf91e8fabd523687b8de Mon Sep 17 00:00:00 2001 From: Warren Krewenki <19960+krewenki@users.noreply.github.com> Date: Fri, 24 Apr 2026 14:49:51 -0300 Subject: [PATCH] fix(events): make undiverged honor module autoplanning (#1) * fix: make undiverged honor module autoplanning Teach targeted undiverged checks to reuse Atlantis autoplanning impact resolution, including module dependencies and autodiscovered projects. Co-authored-by: OpenAI Codex Signed-off-by: Warren Krewenki <19960+krewenki@users.noreply.github.com> * chore(events): clarify diverged file logging Signed-off-by: Warren Krewenki <19960+krewenki@users.noreply.github.com> * fix(events): address PR review feedback Signed-off-by: Warren Krewenki <19960+krewenki@users.noreply.github.com> * fix(events): fall back to full undiverged check on resolver errors Signed-off-by: Warren Krewenki <19960+krewenki@users.noreply.github.com> --------- Signed-off-by: Warren Krewenki <19960+krewenki@users.noreply.github.com> Co-authored-by: OpenAI Codex --- runatlantis.io/docs/command-requirements.md | 7 +- server/events/command_requirement_handler.go | 29 +- server/events/mock_workingdir_test.go | 60 ++++ server/events/mocks/mock_working_dir.go | 60 ++++ server/events/undiverged_project_impact.go | 255 +++++++++++++++++ .../events/undiverged_project_impact_test.go | 266 ++++++++++++++++++ server/events/working_dir.go | 86 ++++-- server/server.go | 11 +- 8 files changed, 745 insertions(+), 29 deletions(-) create mode 100644 server/events/undiverged_project_impact.go create mode 100644 server/events/undiverged_project_impact_test.go diff --git a/runatlantis.io/docs/command-requirements.md b/runatlantis.io/docs/command-requirements.md index ae77074fd7..63b59f2c95 100644 --- a/runatlantis.io/docs/command-requirements.md +++ b/runatlantis.io/docs/command-requirements.md @@ -230,7 +230,12 @@ patterns to perform a **targeted** divergence check. 
Instead of failing when **a it only fails when files matching the project's `when_modified` patterns have changed. This is especially useful in monorepos where unrelated changes to other projects should not block your applies. -If no `when_modified` patterns are configured (e.g. auto-discovered projects), `undiverged` falls back to checking all files. +Targeted `undiverged` checks also follow Atlantis project selection for: + +* repo-configured projects affected through [module autoplanning](server-configuration.md#autoplan-modules) +* auto-discovered projects selected by the default `autoplan-file-list` rules + +If Atlantis cannot determine project impact for a repository, `undiverged` falls back to checking all files. **Example scenario:** diff --git a/server/events/command_requirement_handler.go b/server/events/command_requirement_handler.go index 4639ea3a70..02cb73f1e1 100644 --- a/server/events/command_requirement_handler.go +++ b/server/events/command_requirement_handler.go @@ -20,8 +20,13 @@ type CommandRequirementHandler interface { ValidateImportProject(repoDir string, ctx command.ProjectContext) (string, error) } +type UndivergedProjectImpactResolver interface { + HasUndivergedImpact(ctx command.ProjectContext, repoDir string, workingDir WorkingDir) (handled bool, impacted bool, err error) +} + type DefaultCommandRequirementHandler struct { - WorkingDir WorkingDir + WorkingDir WorkingDir + ProjectImpactResolver UndivergedProjectImpactResolver } func (a *DefaultCommandRequirementHandler) ValidateProjectDependencies(ctx command.ProjectContext) (failure string, err error) { @@ -72,7 +77,12 @@ func (a *DefaultCommandRequirementHandler) validateCommandRequirement(repoDir st return fmt.Sprintf("Pull request must be mergeable before running %s%s.", cmd, suffix), nil } case raw.UnDivergedRequirement: - if a.WorkingDir.HasDiverged(ctx.Log, repoDir, ctx.RepoRelDir, ctx.AutoplanWhenModified, ctx.Pull) { + diverged, err := a.hasUndivergedImpact(repoDir, ctx) + if err 
!= nil { + ctx.Log.Warn("evaluating undiverged requirement has failed, falling back to full divergence check: %s", err) + diverged = a.WorkingDir.HasDiverged(ctx.Log, repoDir, ctx.RepoRelDir, nil, ctx.Pull) + } + if diverged { return fmt.Sprintf("Default branch must be rebased onto pull request before running %s.", cmd), nil } } @@ -80,3 +90,18 @@ func (a *DefaultCommandRequirementHandler) validateCommandRequirement(repoDir st // Passed all requirements configured. return "", nil } + +func (a *DefaultCommandRequirementHandler) hasUndivergedImpact(repoDir string, ctx command.ProjectContext) (bool, error) { + if a.ProjectImpactResolver == nil { + return a.WorkingDir.HasDiverged(ctx.Log, repoDir, ctx.RepoRelDir, ctx.AutoplanWhenModified, ctx.Pull), nil + } + + handled, impacted, err := a.ProjectImpactResolver.HasUndivergedImpact(ctx, repoDir, a.WorkingDir) + if err != nil { + return false, err + } + if !handled { + return a.WorkingDir.HasDiverged(ctx.Log, repoDir, ctx.RepoRelDir, ctx.AutoplanWhenModified, ctx.Pull), nil + } + return impacted, nil +} diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index a1074a9abb..07e2db7543 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -90,6 +90,25 @@ func (mock *MockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Re return _ret0 } +func (mock *MockWorkingDir) GetDivergedFiles(logger logging.SimpleLogging, cloneDir string, pullRequest models.PullRequest) ([]string, error) { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") + } + _params := []pegomock.Param{logger, cloneDir, pullRequest} + _result := pegomock.GetGenericMockFrom(mock).Invoke("GetDivergedFiles", _params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var _ret0 []string + var _ret1 error + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].([]string) + } + if _result[1] != nil { + _ret1 = _result[1].(error) + } + } + return _ret0, _ret1 +} + func (mock *MockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") @@ -427,6 +446,47 @@ func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments( return } +func (verifier *VerifierMockWorkingDir) GetDivergedFiles(logger logging.SimpleLogging, cloneDir string, pullRequest models.PullRequest) *MockWorkingDir_GetDivergedFiles_OngoingVerification { + _params := []pegomock.Param{logger, cloneDir, pullRequest} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetDivergedFiles", _params, verifier.timeout) + return &MockWorkingDir_GetDivergedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockWorkingDir_GetDivergedFiles_OngoingVerification struct { + mock *MockWorkingDir + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockWorkingDir_GetDivergedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string, models.PullRequest) { + logger, cloneDir, pullRequest := c.GetAllCapturedArguments() + return logger[len(logger)-1], cloneDir[len(cloneDir)-1], pullRequest[len(pullRequest)-1] +} + +func (c *MockWorkingDir_GetDivergedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string, _param2 
[]models.PullRequest) { + _params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(_params) > 0 { + if len(_params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range _params[0] { + _param0[u] = param.(logging.SimpleLogging) + } + } + if len(_params) > 1 { + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range _params[1] { + _param1[u] = param.(string) + } + } + if len(_params) > 2 { + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range _params[2] { + _param2[u] = param.(models.PullRequest) + } + } + } + return +} + func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { _params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetGitUntrackedFiles", _params, verifier.timeout) diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index 32489e8f8f..f383285b60 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -90,6 +90,25 @@ func (mock *MockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Re return _ret0 } +func (mock *MockWorkingDir) GetDivergedFiles(logger logging.SimpleLogging, cloneDir string, pullRequest models.PullRequest) ([]string, error) { + if mock == nil { + panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") + } + _params := []pegomock.Param{logger, cloneDir, pullRequest} + _result := pegomock.GetGenericMockFrom(mock).Invoke("GetDivergedFiles", _params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var _ret0 []string + var _ret1 error + if len(_result) != 0 { + if _result[0] != nil { + _ret0 = _result[0].([]string) + } + if _result[1] != nil { + _ret1 = _result[1].(error) + } + } + return _ret0, _ret1 +} + func (mock *MockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") @@ -427,6 +446,47 @@ func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments( return } +func (verifier *VerifierMockWorkingDir) GetDivergedFiles(logger logging.SimpleLogging, cloneDir string, pullRequest models.PullRequest) *MockWorkingDir_GetDivergedFiles_OngoingVerification { + _params := []pegomock.Param{logger, cloneDir, pullRequest} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetDivergedFiles", _params, verifier.timeout) + return &MockWorkingDir_GetDivergedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockWorkingDir_GetDivergedFiles_OngoingVerification struct { + mock *MockWorkingDir + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockWorkingDir_GetDivergedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string, models.PullRequest) { + logger, cloneDir, pullRequest := c.GetAllCapturedArguments() + return logger[len(logger)-1], cloneDir[len(cloneDir)-1], pullRequest[len(pullRequest)-1] +} + +func (c *MockWorkingDir_GetDivergedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string, _param2 
[]models.PullRequest) { + _params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(_params) > 0 { + if len(_params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range _params[0] { + _param0[u] = param.(logging.SimpleLogging) + } + } + if len(_params) > 1 { + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range _params[1] { + _param1[u] = param.(string) + } + } + if len(_params) > 2 { + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range _params[2] { + _param2[u] = param.(models.PullRequest) + } + } + } + return +} + func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { _params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetGitUntrackedFiles", _params, verifier.timeout) diff --git a/server/events/undiverged_project_impact.go b/server/events/undiverged_project_impact.go new file mode 100644 index 0000000000..86cd799e8e --- /dev/null +++ b/server/events/undiverged_project_impact.go @@ -0,0 +1,255 @@ +// Copyright 2025 The Atlantis Authors +// SPDX-License-Identifier: Apache-2.0 + +package events + +import ( + "fmt" + "path/filepath" + + "github.com/runatlantis/atlantis/server/core/config" + "github.com/runatlantis/atlantis/server/core/config/valid" + "github.com/runatlantis/atlantis/server/events/command" +) + +type undivergedProjectImpactMode int + +const ( + undivergedProjectImpactModeNone undivergedProjectImpactMode = iota + undivergedProjectImpactModeConfigured + undivergedProjectImpactModeAutoDiscovered +) + +type undivergedProjectImpactTarget struct { + mode undivergedProjectImpactMode + repoCfg valid.RepoCfg +} + +type 
undivergedProjectImpactResolver struct { + ParserValidator *config.ParserValidator + ProjectFinder ProjectFinder + GlobalCfg valid.GlobalCfg + AutoDetectModuleFiles string + AutoplanFileList string + AutoDiscoverMode string +} + +// NewUndivergedProjectImpactResolver builds the resolver used by targeted +// undiverged checks to mirror Atlantis project selection. +func NewUndivergedProjectImpactResolver( + parserValidator *config.ParserValidator, + projectFinder ProjectFinder, + globalCfg valid.GlobalCfg, + autoDetectModuleFiles string, + autoplanFileList string, + autoDiscoverMode string, +) *undivergedProjectImpactResolver { + return &undivergedProjectImpactResolver{ + ParserValidator: parserValidator, + ProjectFinder: projectFinder, + GlobalCfg: globalCfg, + AutoDetectModuleFiles: autoDetectModuleFiles, + AutoplanFileList: autoplanFileList, + AutoDiscoverMode: autoDiscoverMode, + } +} + +func (r *undivergedProjectImpactResolver) HasUndivergedImpact( + ctx command.ProjectContext, + repoDir string, + workingDir WorkingDir, +) (handled bool, impacted bool, err error) { + target, err := r.resolveTarget(ctx, repoDir) + if err != nil { + return false, false, err + } + if target.mode == undivergedProjectImpactModeNone { + return false, false, nil + } + + divergedFiles, err := workingDir.GetDivergedFiles(ctx.Log, repoDir, ctx.Pull) + if err != nil { + return true, false, err + } + + impacted, err = r.impactedByModifiedFiles(ctx, repoDir, target, divergedFiles) + if err != nil { + return true, false, err + } + + return true, impacted, nil +} + +func (r *undivergedProjectImpactResolver) resolveTarget(ctx command.ProjectContext, repoDir string) (undivergedProjectImpactTarget, error) { + if r == nil { + return undivergedProjectImpactTarget{}, nil + } + + repoCfgFile := r.GlobalCfg.RepoConfigFile(ctx.Pull.BaseRepo.ID()) + hasRepoCfg, err := r.ParserValidator.HasRepoCfg(repoDir, repoCfgFile) + if err != nil { + return undivergedProjectImpactTarget{}, fmt.Errorf("looking for %q 
in %q: %w", repoCfgFile, repoDir, err) + } + + var repoCfg valid.RepoCfg + if hasRepoCfg { + repoCfg, err = r.ParserValidator.ParseRepoCfg(repoDir, r.GlobalCfg, ctx.Pull.BaseRepo.ID(), ctx.Pull.BaseBranch) + if err != nil { + return undivergedProjectImpactTarget{}, fmt.Errorf("parsing %s: %w", repoCfgFile, err) + } + } + + for _, project := range repoCfg.Projects { + if matchesConfiguredProjectContext(project, ctx) { + return undivergedProjectImpactTarget{ + mode: undivergedProjectImpactModeConfigured, + repoCfg: repoCfg, + }, nil + } + } + + if r.shouldUseAutoDiscoveredTargeting(ctx, repoCfg) { + return undivergedProjectImpactTarget{ + mode: undivergedProjectImpactModeAutoDiscovered, + repoCfg: repoCfg, + }, nil + } + + return undivergedProjectImpactTarget{ + mode: undivergedProjectImpactModeNone, + repoCfg: repoCfg, + }, nil +} + +func (r *undivergedProjectImpactResolver) impactedByModifiedFiles( + ctx command.ProjectContext, + repoDir string, + target undivergedProjectImpactTarget, + modifiedFiles []string, +) (bool, error) { + if target.mode == undivergedProjectImpactModeNone { + return false, nil + } + + moduleInfo, err := FindModuleProjects(repoDir, r.AutoDetectModuleFiles) + if err != nil { + return false, fmt.Errorf("loading project module dependencies: %w", err) + } + + switch target.mode { + case undivergedProjectImpactModeConfigured: + return r.configuredProjectImpacted(ctx, repoDir, target.repoCfg, modifiedFiles, moduleInfo) + case undivergedProjectImpactModeAutoDiscovered: + return r.autoDiscoveredProjectImpacted(ctx, repoDir, target.repoCfg, modifiedFiles, moduleInfo) + default: + return false, nil + } +} + +func (r *undivergedProjectImpactResolver) configuredProjectImpacted( + ctx command.ProjectContext, + repoDir string, + repoCfg valid.RepoCfg, + modifiedFiles []string, + moduleInfo ModuleProjects, +) (bool, error) { + projects, err := r.ProjectFinder.DetermineProjectsViaConfig(ctx.Log, modifiedFiles, repoCfg, repoDir, moduleInfo) + if err != nil { 
+ return false, err + } + + for _, project := range projects { + if matchesConfiguredProjectContext(project, ctx) { + return true, nil + } + } + + return false, nil +} + +func (r *undivergedProjectImpactResolver) autoDiscoveredProjectImpacted( + ctx command.ProjectContext, + repoDir string, + repoCfg valid.RepoCfg, + modifiedFiles []string, + moduleInfo ModuleProjects, +) (bool, error) { + modifiedProjects := r.ProjectFinder.DetermineProjects( + ctx.Log, + modifiedFiles, + ctx.Pull.BaseRepo.FullName, + repoDir, + r.AutoplanFileList, + moduleInfo, + ) + + configuredProjDirs := make(map[string]bool) + for _, configProj := range repoCfg.Projects { + configuredProjDirs[filepath.Clean(configProj.Dir)] = true + } + + currentDir := filepath.Clean(ctx.RepoRelDir) + for _, project := range modifiedProjects { + projectDir := filepath.Clean(project.Path) + if r.isAutoDiscoverPathIgnored(ctx, repoCfg, projectDir) { + continue + } + if configuredProjDirs[projectDir] { + continue + } + if projectDir == currentDir { + return true, nil + } + } + + return false, nil +} + +func (r *undivergedProjectImpactResolver) shouldUseAutoDiscoveredTargeting(ctx command.ProjectContext, repoCfg valid.RepoCfg) bool { + if ctx.ProjectName != "" { + return false + } + + if !r.autoDiscoverModeEnabled(ctx, repoCfg) { + return false + } + + currentDir := filepath.Clean(ctx.RepoRelDir) + if len(repoCfg.FindProjectsByDir(currentDir)) > 0 { + return false + } + + return !r.isAutoDiscoverPathIgnored(ctx, repoCfg, currentDir) +} + +func (r *undivergedProjectImpactResolver) autoDiscoverModeEnabled(ctx command.ProjectContext, repoCfg valid.RepoCfg) bool { + defaultAutoDiscoverMode := valid.AutoDiscoverMode(r.AutoDiscoverMode) + if defaultAutoDiscoverMode == "" { + defaultAutoDiscoverMode = valid.AutoDiscoverAutoMode + } + + globalAutoDiscover := r.GlobalCfg.RepoAutoDiscoverCfg(ctx.Pull.BaseRepo.ID()) + if globalAutoDiscover != nil { + defaultAutoDiscoverMode = globalAutoDiscover.Mode + } + + return 
repoCfg.AutoDiscoverEnabled(defaultAutoDiscoverMode) +} + +func (r *undivergedProjectImpactResolver) isAutoDiscoverPathIgnored(ctx command.ProjectContext, repoCfg valid.RepoCfg, path string) bool { + fromGlobalAutoDiscover := r.GlobalCfg.RepoAutoDiscoverCfg(ctx.Pull.BaseRepo.ID()) + if fromGlobalAutoDiscover != nil { + return fromGlobalAutoDiscover.IsPathIgnored(path) + } + if repoCfg.AutoDiscover != nil { + return repoCfg.AutoDiscover.IsPathIgnored(path) + } + + return false +} + +func matchesConfiguredProjectContext(project valid.Project, ctx command.ProjectContext) bool { + return filepath.Clean(project.Dir) == filepath.Clean(ctx.RepoRelDir) && + project.Workspace == ctx.Workspace && + project.GetName() == ctx.ProjectName +} diff --git a/server/events/undiverged_project_impact_test.go b/server/events/undiverged_project_impact_test.go new file mode 100644 index 0000000000..b3113a6b2f --- /dev/null +++ b/server/events/undiverged_project_impact_test.go @@ -0,0 +1,266 @@ +// Copyright 2025 The Atlantis Authors +// SPDX-License-Identifier: Apache-2.0 + +package events + +import ( + "os" + "path/filepath" + "testing" + + . "github.com/petergtz/pegomock/v4" + "github.com/runatlantis/atlantis/server/core/config" + "github.com/runatlantis/atlantis/server/core/config/raw" + "github.com/runatlantis/atlantis/server/core/config/valid" + "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" + . 
"github.com/runatlantis/atlantis/testing" +) + +const defaultAutoplanFileList = "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl,**/.terraform.lock.hcl" + +func TestUndivergedProjectImpactResolver_ConfiguredProjectModuleChange(t *testing.T) { + repoDir := configuredProjectRepo(t) + resolver := newTestUndivergedProjectImpactResolver("**/*.tf", defaultAutoplanFileList, "auto") + ctx := newTestUndivergedProjectContext(t, "project1") + + target, err := resolver.resolveTarget(ctx, repoDir) + Ok(t, err) + Equals(t, undivergedProjectImpactModeConfigured, target.mode) + + impacted, err := resolver.impactedByModifiedFiles(ctx, repoDir, target, []string{"modules/database/main.tf"}) + Ok(t, err) + Equals(t, true, impacted) +} + +func TestUndivergedProjectImpactResolver_ConfiguredProjectIgnoresUnrelatedChanges(t *testing.T) { + repoDir := configuredProjectRepo(t) + resolver := newTestUndivergedProjectImpactResolver("**/*.tf", defaultAutoplanFileList, "auto") + ctx := newTestUndivergedProjectContext(t, "project1") + + target, err := resolver.resolveTarget(ctx, repoDir) + Ok(t, err) + Equals(t, undivergedProjectImpactModeConfigured, target.mode) + + impacted, err := resolver.impactedByModifiedFiles(ctx, repoDir, target, []string{"project2/main.tf"}) + Ok(t, err) + Equals(t, false, impacted) +} + +func TestUndivergedProjectImpactResolver_AutoDiscoveredProjectUsesModuleAutoplanning(t *testing.T) { + repoDir := autoDiscoveredRepo(t) + resolver := newTestUndivergedProjectImpactResolver("**/*.tf", defaultAutoplanFileList, "auto") + ctx := newTestUndivergedProjectContext(t, "project1") + + target, err := resolver.resolveTarget(ctx, repoDir) + Ok(t, err) + Equals(t, undivergedProjectImpactModeAutoDiscovered, target.mode) + + impacted, err := resolver.impactedByModifiedFiles(ctx, repoDir, target, []string{"modules/database/main.tf"}) + Ok(t, err) + Equals(t, true, impacted) +} + +func TestUndivergedProjectImpactResolver_AutoDiscoveredProjectIgnoresUnrelatedChanges(t *testing.T) { 
+ repoDir := autoDiscoveredRepo(t) + resolver := newTestUndivergedProjectImpactResolver("**/*.tf", defaultAutoplanFileList, "auto") + ctx := newTestUndivergedProjectContext(t, "project1") + + target, err := resolver.resolveTarget(ctx, repoDir) + Ok(t, err) + Equals(t, undivergedProjectImpactModeAutoDiscovered, target.mode) + + impacted, err := resolver.impactedByModifiedFiles(ctx, repoDir, target, []string{"project2/main.tf"}) + Ok(t, err) + Equals(t, false, impacted) +} + +func TestUndivergedProjectImpactResolver_NoTargetWhenGlobalAutoDiscoverDisabled(t *testing.T) { + repoDir := autoDiscoveredRepo(t) + resolver := newTestUndivergedProjectImpactResolver("**/*.tf", defaultAutoplanFileList, "disabled") + resolver.GlobalCfg.Repos[0].AutoDiscover = &valid.AutoDiscover{Mode: valid.AutoDiscoverDisabledMode} + ctx := newTestUndivergedProjectContext(t, "project1") + + target, err := resolver.resolveTarget(ctx, repoDir) + Ok(t, err) + Equals(t, undivergedProjectImpactModeNone, target.mode) +} + +func TestDefaultCommandRequirementHandler_TargetedUndivergedFailsForImpactedConfiguredProject(t *testing.T) { + RegisterMockTestingT(t) + + repoDir := configuredProjectRepo(t) + resolver := newTestUndivergedProjectImpactResolver("**/*.tf", defaultAutoplanFileList, "auto") + workingDir := NewMockWorkingDir() + When(workingDir.GetDivergedFiles(Any[logging.SimpleLogging](), Any[string](), Any[models.PullRequest]())).ThenReturn([]string{"modules/database/main.tf"}, nil) + + handler := &DefaultCommandRequirementHandler{ + WorkingDir: workingDir, + ProjectImpactResolver: resolver, + } + + ctx := newTestUndivergedProjectContext(t, "project1") + ctx.ApplyRequirements = []string{raw.UnDivergedRequirement} + + failure, err := handler.ValidateApplyProject(repoDir, ctx) + Ok(t, err) + Equals(t, "Default branch must be rebased onto pull request before running apply.", failure) +} + +func TestDefaultCommandRequirementHandler_TargetedUndivergedPassesForUnrelatedConfiguredChange(t *testing.T) { + 
RegisterMockTestingT(t) + + repoDir := configuredProjectRepo(t) + resolver := newTestUndivergedProjectImpactResolver("**/*.tf", defaultAutoplanFileList, "auto") + workingDir := NewMockWorkingDir() + When(workingDir.GetDivergedFiles(Any[logging.SimpleLogging](), Any[string](), Any[models.PullRequest]())).ThenReturn([]string{"project2/main.tf"}, nil) + + handler := &DefaultCommandRequirementHandler{ + WorkingDir: workingDir, + ProjectImpactResolver: resolver, + } + + ctx := newTestUndivergedProjectContext(t, "project1") + ctx.ApplyRequirements = []string{raw.UnDivergedRequirement} + + failure, err := handler.ValidateApplyProject(repoDir, ctx) + Ok(t, err) + Equals(t, "", failure) +} + +func TestDefaultCommandRequirementHandler_TargetedUndivergedFallsBackToFullCheckOnResolverError(t *testing.T) { + RegisterMockTestingT(t) + + repoDir := configuredProjectRepo(t) + workingDir := NewMockWorkingDir() + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Eq(repoDir), Eq("project1"), Eq([]string{"modules/database/**"}), Any[models.PullRequest]())).ThenReturn(true) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Eq(repoDir), Eq("project1"), Eq([]string(nil)), Any[models.PullRequest]())).ThenReturn(false) + + handler := &DefaultCommandRequirementHandler{ + WorkingDir: workingDir, + ProjectImpactResolver: stubUndivergedProjectImpactResolver{err: os.ErrInvalid}, + } + + ctx := newTestUndivergedProjectContext(t, "project1") + ctx.ApplyRequirements = []string{raw.UnDivergedRequirement} + ctx.AutoplanWhenModified = []string{"modules/database/**"} + + failure, err := handler.ValidateApplyProject(repoDir, ctx) + Ok(t, err) + Equals(t, "", failure) +} + +type stubUndivergedProjectImpactResolver struct { + handled bool + impacted bool + err error +} + +func (s stubUndivergedProjectImpactResolver) HasUndivergedImpact(command.ProjectContext, string, WorkingDir) (bool, bool, error) { + return s.handled, s.impacted, s.err +} + +func configuredProjectRepo(t *testing.T) 
string { + t.Helper() + + repoDir := DirStructure(t, map[string]any{ + "atlantis.yaml": nil, + "project1": map[string]any{ + "main.tf": nil, + }, + "modules": map[string]any{ + "database": map[string]any{ + "main.tf": nil, + }, + }, + }) + + writeTestFile(t, filepath.Join(repoDir, "atlantis.yaml"), `version: 3 +projects: +- dir: project1 + workspace: default +`) + writeTestFile(t, filepath.Join(repoDir, "project1", "main.tf"), `module "database" { + source = "../modules/database" +} +`) + writeTestFile(t, filepath.Join(repoDir, "modules", "database", "main.tf"), `output "name" { + value = "database" +} +`) + + return repoDir +} + +func autoDiscoveredRepo(t *testing.T) string { + t.Helper() + + repoDir := DirStructure(t, map[string]any{ + "project1": map[string]any{ + "main.tf": nil, + }, + "project2": map[string]any{ + "main.tf": nil, + }, + "modules": map[string]any{ + "database": map[string]any{ + "main.tf": nil, + }, + }, + }) + + writeTestFile(t, filepath.Join(repoDir, "project1", "main.tf"), `module "database" { + source = "../modules/database" +} +`) + writeTestFile(t, filepath.Join(repoDir, "project2", "main.tf"), `output "name" { + value = "project2" +} +`) + writeTestFile(t, filepath.Join(repoDir, "modules", "database", "main.tf"), `output "name" { + value = "database" +} +`) + + return repoDir +} + +func newTestUndivergedProjectImpactResolver(autoDetectModuleFiles string, autoplanFileList string, autoDiscoverMode string) *undivergedProjectImpactResolver { + parserValidator := &config.ParserValidator{} + globalCfg := valid.NewGlobalCfgFromArgs(valid.GlobalCfgArgs{}) + + return NewUndivergedProjectImpactResolver( + parserValidator, + &DefaultProjectFinder{}, + globalCfg, + autoDetectModuleFiles, + autoplanFileList, + autoDiscoverMode, + ) +} + +func newTestUndivergedProjectContext(t *testing.T, repoRelDir string) command.ProjectContext { + t.Helper() + + return command.ProjectContext{ + Log: logging.NewNoopLogger(t), + RepoRelDir: repoRelDir, + Workspace: 
DefaultWorkspace, + Pull: models.PullRequest{ + BaseBranch: "main", + BaseRepo: models.Repo{ + FullName: "owner/repo", + VCSHost: models.VCSHost{ + Hostname: "github.com", + }, + }, + }, + } +} + +func writeTestFile(t *testing.T, path string, contents string) { + t.Helper() + + Ok(t, os.WriteFile(path, []byte(contents), 0600)) +} diff --git a/server/events/working_dir.go b/server/events/working_dir.go index 65264a3491..2665fab4f5 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -56,6 +56,10 @@ type WorkingDir interface { // files matching those patterns is considered. When patterns are empty, // any divergence is reported. HasDiverged(logger logging.SimpleLogging, cloneDir string, projectPath string, autoplanWhenModified []string, pullRequest models.PullRequest) bool + // GetDivergedFiles returns the files changed on the base branch since the + // current checkout. When merge checkout is disabled there is no divergence + // check, so this returns no files and no error. + GetDivergedFiles(logger logging.SimpleLogging, cloneDir string, pullRequest models.PullRequest) ([]string, error) GetPullDir(r models.Repo, p models.PullRequest) (string, error) // Delete deletes the workspace for this repo and pull. 
Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error @@ -264,6 +268,22 @@ func (w *FileWorkspace) HasDiverged(logger logging.SimpleLogging, cloneDir strin return w.hasDiverged(logger, cloneDir) } +func (w *FileWorkspace) GetDivergedFiles(logger logging.SimpleLogging, cloneDir string, pullRequest models.PullRequest) ([]string, error) { + logger.Debug("GetDivergedFiles: getting diverged files in %s", cloneDir) + if !w.CheckoutMerge { + logger.Debug("GetDivergedFiles: CheckoutMerge is false, skipping diverged file lookup") + return nil, nil + } + + unlockGitReadLock := w.gitReadLock(cloneDir) + defer unlockGitReadLock() + + unlockGitRefLock := w.gitRefLock(cloneDir) + defer unlockGitRefLock() + + return w.getDivergedFiles(logger, cloneDir, pullRequest) +} + // hasDiverged runs fetch and git status to detect divergence. Caller must hold // gitRefLock(cloneDir); if not already holding the repo write lock (e.g. from recheckDiverged), // caller must also hold gitReadLock(cloneDir). 
@@ -298,35 +318,12 @@ func (w *FileWorkspace) hasDivergedForPatterns(logger logging.SimpleLogging, clo logger.Debug("HasDiverged: running targeted divergence check for project %s with %d patterns", projectPath, len(autoplanWhenModified)) - logger.Debug("HasDiverged: running git fetch") - fetchCmd := exec.Command("git", "fetch") - fetchCmd.Dir = cloneDir - outputFetch, err := fetchCmd.CombinedOutput() - if err != nil { - logger.Warn("HasDiverged: fetching repo has failed: %s", string(outputFetch)) - return true - } - - remoteRef := fmt.Sprintf("origin/%s", pullRequest.BaseBranch) - revisionRange := fmt.Sprintf("HEAD..%s", remoteRef) - - logger.Debug("HasDiverged: getting changed files in %s", revisionRange) - changedFilesCmd := exec.Command("git", "log", revisionRange, "--name-only", "--format=") //nolint:gosec // remoteRef is from pullRequest.BaseBranch, a controlled VCS branch name - changedFilesCmd.Dir = cloneDir - outputChangedFiles, err := changedFilesCmd.CombinedOutput() + nonEmptyChangedFiles, err := w.getDivergedFiles(logger, cloneDir, pullRequest) if err != nil { - logger.Warn("HasDiverged: getting changed files has failed: %s", string(outputChangedFiles)) + logger.Warn("HasDiverged: getting changed files has failed: %s", err) return true } - changedFiles := strings.Split(strings.TrimSpace(string(outputChangedFiles)), "\n") - var nonEmptyChangedFiles []string - for _, file := range changedFiles { - if strings.TrimSpace(file) != "" { - nonEmptyChangedFiles = append(nonEmptyChangedFiles, file) - } - } - if len(nonEmptyChangedFiles) == 0 { logger.Debug("HasDiverged: no changed files found in divergent commits") return false @@ -373,6 +370,45 @@ func (w *FileWorkspace) hasDivergedForPatterns(logger logging.SimpleLogging, clo return false } +func divergedFilesCommandError(action string, err error, output []byte) error { + trimmedOutput := strings.TrimSpace(string(output)) + if trimmedOutput == "" { + return fmt.Errorf("%s: %w", action, err) + } + return 
fmt.Errorf("%s: %w: %s", action, err, trimmedOutput) +} + +func (w *FileWorkspace) getDivergedFiles(logger logging.SimpleLogging, cloneDir string, pullRequest models.PullRequest) ([]string, error) { + logger.Debug("GetDivergedFiles: running git fetch") + fetchCmd := exec.Command("git", "fetch") + fetchCmd.Dir = cloneDir + outputFetch, err := fetchCmd.CombinedOutput() + if err != nil { + return nil, divergedFilesCommandError("fetching repo", err, outputFetch) + } + + remoteRef := fmt.Sprintf("origin/%s", pullRequest.BaseBranch) + revisionRange := fmt.Sprintf("HEAD..%s", remoteRef) + + logger.Debug("GetDivergedFiles: getting changed files in %s", revisionRange) + changedFilesCmd := exec.Command("git", "log", revisionRange, "--name-only", "--format=") //nolint:gosec // remoteRef is from pullRequest.BaseBranch, a controlled VCS branch name + changedFilesCmd.Dir = cloneDir + outputChangedFiles, err := changedFilesCmd.CombinedOutput() + if err != nil { + return nil, divergedFilesCommandError("getting changed files", err, outputChangedFiles) + } + + changedFiles := strings.Split(strings.TrimSpace(string(outputChangedFiles)), "\n") + var nonEmptyChangedFiles []string + for _, file := range changedFiles { + if strings.TrimSpace(file) != "" { + nonEmptyChangedFiles = append(nonEmptyChangedFiles, file) + } + } + + return nonEmptyChangedFiles, nil +} + func (w *FileWorkspace) remoteHasBranch(logger logging.SimpleLogging, c wrappedGitContext, branch string) bool { ref := "refs/remotes/origin/" + branch diff --git a/server/server.go b/server/server.go index 137332aea3..595fcb8312 100644 --- a/server/server.go +++ b/server/server.go @@ -650,11 +650,12 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { CommitStatusUpdater: commitStatusUpdater, Router: router, } + projectFinder := &events.DefaultProjectFinder{} projectCommandBuilder := events.NewInstrumentedProjectCommandBuilder( logger, policyChecksEnabled, parserValidator, - &events.DefaultProjectFinder{}, 
+ projectFinder, vcsClient, workingDir, workingDirLocker, @@ -694,6 +695,14 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { applyRequirementHandler := &events.DefaultCommandRequirementHandler{ WorkingDir: workingDir, + ProjectImpactResolver: events.NewUndivergedProjectImpactResolver( + parserValidator, + projectFinder, + globalCfg, + userConfig.AutoplanModulesFromProjects, + userConfig.AutoplanFileList, + userConfig.AutoDiscoverModeFlag, + ), } cancellationTracker := events.NewCancellationTracker()