Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
7201db1
Merge branch 'feat/checkpoints-v2-entire-resume' of github.com:entire…
pfleidi Mar 28, 2026
60fbfdc
Merge branch 'feat/checkpoints-v2-entire-resume' of github.com:entire…
pfleidi Mar 28, 2026
e71bfdb
feat: Introduce IsPushV2RefsEnabled()
pfleidi Mar 30, 2026
0d793fc
refactor: simplify GenerationMetadata to timestamps only, write at ar…
pfleidi Mar 30, 2026
4ebd0b4
refactor: extract WalkCheckpointShards helper for shard iteration
pfleidi Mar 30, 2026
8fa80cf
feat: add tryPushRef and pushRefIfNeeded for v2 custom refs
pfleidi Mar 30, 2026
92c4f7b
feat: implement fetchAndMergeRef for v2 custom ref merge recovery
pfleidi Mar 30, 2026
95c79fe
feat: integrate v2 push into PrePush hook
pfleidi Mar 31, 2026
d4773c3
feat: rotation conflict recovery for v2 /full/current push
pfleidi Mar 31, 2026
186271d
feat: fetch v2 /main ref from checkpoint remote when missing locally
pfleidi Mar 31, 2026
c0505b2
feat: fetch-on-demand for remote /full/* refs in entire resume
pfleidi Mar 31, 2026
5f79093
test: add integration tests for v2 push cycle
pfleidi Mar 31, 2026
0dbbf7d
fix: resolve lint warnings in v2 push logic
pfleidi Mar 31, 2026
05ee59e
refactor: deduplicate generationRefPattern across packages
pfleidi Mar 31, 2026
13fd367
fix: log warning when generation timestamp update fails during rotati…
pfleidi Mar 31, 2026
ca7d12b
fix: document why fetchRemoteFullRefs uses origin directly
pfleidi Mar 31, 2026
750571d
fix: respect checkpoint_remote in all V2GitStore operations
pfleidi Mar 31, 2026
dd13101
fix: use plumbing.ReferenceName equality instead of string cast
pfleidi Mar 31, 2026
edca0a9
fix: add nolint explanations for intentional nil returns in fetchV2Ma…
pfleidi Mar 31, 2026
ac79fb7
Merge branch 'feat/checkpoints-v2-entire-resume' into feat/checkpoint…
pfleidi Mar 31, 2026
6829af1
fix: resolve lint warnings (errcheck, nilerr, testifylint)
pfleidi Mar 31, 2026
09ad6c7
review: disconnect stdin and disable terminal prompts on hook-context…
pfleidi Mar 31, 2026
55538f5
review: defer temp ref cleanup in fetchAndMergeRef to cover all error…
pfleidi Mar 31, 2026
cf0e643
review: use jsonutil.MarshalIndentWithNewline for consistent generati…
pfleidi Mar 31, 2026
664b843
review: default fetchRemote to origin when empty in NewV2GitStore
pfleidi Mar 31, 2026
61d1648
fix: remove unused nolint:nilerr directives in fetchV2MainRefIfMissing
pfleidi Mar 31, 2026
aeadae0
Merge remote-tracking branch 'origin/main' into feat/checkpoints-v2-p…
pfleidi Apr 2, 2026
4945b1c
Merge remote-tracking branch 'origin/main' into feat/checkpoints-v2-p…
pfleidi Apr 2, 2026
7fc22bd
refactor: use CheckpointGitCommand for v2 push/fetch/ls-remote
pfleidi Apr 2, 2026
fc1aa48
Merge remote-tracking branch 'origin/feat/checkpoints-v2-entire-resum…
pfleidi Apr 2, 2026
e9d0009
refactor: simplify fetchV2MainRefIfMissing to delegate to FetchV2Main…
pfleidi Apr 2, 2026
2bea35f
review: add debug logging for checkpoint_remote URL resolution failures
pfleidi Apr 2, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions cmd/entire/cli/agent/external/external_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ esac

// Temporarily override the default timeout to keep the test fast.
orig := defaultRunTimeout
defaultRunTimeout = 1 * time.Second
defaultRunTimeout = 200 * time.Millisecond
t.Cleanup(func() { defaultRunTimeout = orig })

start := time.Now()
Expand All @@ -174,8 +174,8 @@ esac
if err == nil {
t.Fatal("expected timeout error, got nil")
}
// Should be killed around 1s, not 60s.
if elapsed >= 5*time.Second {
// Should be killed around 200ms, not 60s.
if elapsed >= 4*time.Second {
t.Errorf("run() took %v; default timeout was not applied", elapsed)
}
}
Expand All @@ -202,7 +202,7 @@ esac

// Provide a context with a short deadline. run() should respect it
// and NOT override with its own (longer) timeout.
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
defer cancel()

start := time.Now()
Expand All @@ -212,7 +212,7 @@ esac
if err == nil {
t.Fatal("expected timeout error, got nil")
}
if elapsed >= 5*time.Second {
if elapsed >= 4*time.Second {
t.Errorf("run() took %v; caller's deadline was not respected", elapsed)
}
}
Expand Down
38 changes: 24 additions & 14 deletions cmd/entire/cli/agent/opencode/cli_commands.go
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
package opencode

import (
"bytes"
"context"
"errors"
"fmt"
"os"
"os/exec"
"strings"
"time"
Expand All @@ -12,27 +13,36 @@ import (
// openCodeCommandTimeout is the maximum time to wait for opencode CLI commands.
const openCodeCommandTimeout = 30 * time.Second

// runOpenCodeExportToFile runs `opencode export <sessionID>` and redirects stdout
// to outputPath. This avoids pipe/stdout capture truncation bugs in some opencode versions.
//
// On any command failure the partially written file is removed so callers never
// observe a truncated export on disk. The named return value lets the deferred
// Close surface a close error when the command itself succeeded.
func runOpenCodeExportToFile(ctx context.Context, sessionID, outputPath string) (retErr error) {
	ctx, cancel := context.WithTimeout(ctx, openCodeCommandTimeout)
	defer cancel()

	//nolint:gosec // outputPath is generated by the caller under .entire/tmp
	file, err := os.OpenFile(outputPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600)
	if err != nil {
		return fmt.Errorf("failed to create export file: %w", err)
	}
	defer func() {
		// Only report the close error if the export itself succeeded; an
		// earlier failure is the more useful error to propagate.
		if closeErr := file.Close(); closeErr != nil && retErr == nil {
			retErr = fmt.Errorf("failed to close export file: %w", closeErr)
		}
	}()

	cmd := exec.CommandContext(ctx, "opencode", "export", sessionID)
	cmd.Stdout = file
	var stderr bytes.Buffer
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		// Remove the partial file so a failed export is never mistaken for a
		// cached transcript by later reads.
		_ = os.Remove(outputPath)
		if ctx.Err() == context.DeadlineExceeded {
			return fmt.Errorf("opencode export timed out after %s", openCodeCommandTimeout)
		}
		return fmt.Errorf("opencode export failed: %w (stderr: %s)", err, strings.TrimSpace(stderr.String()))
	}

	return nil
}

// runOpenCodeSessionDelete runs `opencode session delete <sessionID>` to remove
Expand Down
35 changes: 17 additions & 18 deletions cmd/entire/cli/agent/opencode/lifecycle.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@ import (
"github.com/entireio/cli/cmd/entire/cli/validation"
)

// runOpenCodeExportToFileFn is a package-level seam over runOpenCodeExportToFile
// so tests can stub out the external `opencode` CLI invocation.
var runOpenCodeExportToFileFn = runOpenCodeExportToFile

// Hook name constants — these become CLI subcommands under `entire hooks opencode`.
const (
HookNameSessionStart = "session-start"
Expand Down Expand Up @@ -184,33 +186,30 @@ func (a *OpenCodeAgent) fetchAndCacheExport(ctx context.Context, sessionID strin
return "", fmt.Errorf("mock export file not found: %s (ENTIRE_TEST_OPENCODE_MOCK_EXPORT is set)", tmpFile)
}

// Call opencode export to get the transcript (always refresh on each turn)
data, err := runOpenCodeExport(ctx, sessionID)
if err != nil {
// Write export directly to temp file under .entire. Avoid stdout capture,
// which can truncate large payloads in some opencode versions.
if err := os.MkdirAll(tmpDir, 0o750); err != nil {
return "", fmt.Errorf("failed to create temp dir: %w", err)
}

if err := runOpenCodeExportToFileFn(ctx, sessionID, tmpFile); err != nil {
return "", fmt.Errorf("opencode export failed: %w", err)
}

// Validate output is valid JSON before caching
//nolint:gosec // tmpFile is constructed from validated session ID under repo .entire/tmp
data, err := os.ReadFile(tmpFile)
if err != nil {
return "", fmt.Errorf("failed to read export file: %w", err)
}

if !json.Valid(data) {
// Emit prefix/suffix at DEBUG only — the error message propagates to
// WARN logs and could contain sensitive transcript/user content.
logging.Debug(logging.WithComponent(ctx, "lifecycle"),
"opencode export returned invalid JSON",
"opencode export file contained invalid JSON",
slog.Int("bytes", len(data)),
slog.String("prefix", string(data[:min(len(data), 200)])),
slog.String("suffix", string(data[max(0, len(data)-200):])),
slog.String("path", tmpFile),
)
return "", fmt.Errorf("opencode export returned invalid JSON (%d bytes)", len(data))
}

// Write to temp directory under .entire
if err := os.MkdirAll(tmpDir, 0o750); err != nil {
return "", fmt.Errorf("failed to create temp dir: %w", err)
}

if err := os.WriteFile(tmpFile, data, 0o600); err != nil {
return "", fmt.Errorf("failed to write export file: %w", err)
}

return tmpFile, nil
}
31 changes: 31 additions & 0 deletions cmd/entire/cli/agent/opencode/lifecycle_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,15 @@ package opencode

import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"testing"

"github.com/entireio/cli/cmd/entire/cli/agent"
"github.com/entireio/cli/cmd/entire/cli/paths"
"github.com/stretchr/testify/require"
)

Expand Down Expand Up @@ -358,3 +361,31 @@ func TestParseHookEvent_TurnEnd_InvalidSessionID(t *testing.T) {
t.Errorf("expected 'contains path separators' error, got: %v", err)
}
}

// TestFetchAndCacheExport_WritesAndValidatesExportFile stubs the opencode CLI
// export and verifies fetchAndCacheExport caches a valid JSON transcript.
func TestFetchAndCacheExport_WritesAndValidatesExportFile(t *testing.T) {
	workDir := t.TempDir()
	t.Chdir(workDir)

	paths.ClearWorktreeRootCache()
	t.Cleanup(paths.ClearWorktreeRootCache)

	const wantSession = "ses_abc123"

	saved := runOpenCodeExportToFileFn
	t.Cleanup(func() {
		runOpenCodeExportToFileFn = saved
	})
	runOpenCodeExportToFileFn = func(_ context.Context, gotSession, dest string) error {
		if gotSession != wantSession {
			return fmt.Errorf("unexpected session id: %s", gotSession)
		}
		return os.WriteFile(dest, []byte(`{"info":{"id":"ses_abc123"},"messages":[]}`), 0o600)
	}

	agentUnderTest := &OpenCodeAgent{}
	cachedPath, err := agentUnderTest.fetchAndCacheExport(context.Background(), wantSession)
	require.NoError(t, err)

	payload, readErr := os.ReadFile(cachedPath)
	require.NoError(t, readErr)
	require.True(t, json.Valid(payload), "expected cached transcript to be valid JSON")
	require.Contains(t, string(payload), "\"ses_abc123\"")
}
30 changes: 14 additions & 16 deletions cmd/entire/cli/checkpoint/checkpoint.go
Original file line number Diff line number Diff line change
Expand Up @@ -492,23 +492,21 @@ type CodeLearning struct {
// against the committed tree (may include human edits).
//
// Attribution Metrics:
// - TotalCommitted keeps the historical "net additions" view for compatibility
// - TotalLinesChanged measures total committed line changes (adds + modifies + removes)
// - AgentPercentage represents "of the lines changed in this commit, what percentage came from the agent"
// - AgentRemoved tracks committed deletions performed by the agent
type InitialAttribution struct {
	CalculatedAt      time.Time `json:"calculated_at"`
	AgentLines        int       `json:"agent_lines"`                // Lines added by agent that remain in the commit
	AgentRemoved      int       `json:"agent_removed"`              // Lines removed by agent that remain removed in the commit
	HumanAdded        int       `json:"human_added"`                // Lines added by human (excluding modifications)
	HumanModified     int       `json:"human_modified"`             // Lines modified by human (estimate: min(added, removed))
	HumanRemoved      int       `json:"human_removed"`              // Lines removed by human (excluding modifications)
	TotalCommitted    int       `json:"total_committed"`            // Net additions in commit (legacy additions-focused metric)
	TotalLinesChanged int       `json:"total_lines_changed"`        // Total committed line changes (adds + modifies + removes)
	AgentPercentage   float64   `json:"agent_percentage"`           // (agent_lines + agent_removed) / total_lines_changed * 100
	MetricVersion     int       `json:"metric_version,omitempty"`   // 0/absent = legacy (additions-only %), 2 = changed-lines %
}

// Info provides summary information for listing checkpoints.
Expand Down
89 changes: 31 additions & 58 deletions cmd/entire/cli/checkpoint/committed.go
Original file line number Diff line number Diff line change
Expand Up @@ -894,76 +894,49 @@ func (s *GitStore) ListCommitted(ctx context.Context) ([]CommittedInfo, error) {
var checkpoints []CommittedInfo

// Scan sharded structure: <2-char-prefix>/<remaining-id>/metadata.json
for _, bucketEntry := range tree.Entries {
if bucketEntry.Mode != filemode.Dir {
continue
}
// Bucket should be 2 hex chars
if len(bucketEntry.Name) != 2 {
continue
_ = WalkCheckpointShards(s.repo, tree, func(checkpointID id.CheckpointID, cpTreeHash plumbing.Hash) error { //nolint:errcheck // callback never returns errors
checkpointTree, cpTreeErr := s.repo.TreeObject(cpTreeHash)
if cpTreeErr != nil {
return nil //nolint:nilerr // skip unreadable entries, continue walking
}

bucketTree, treeErr := s.repo.TreeObject(bucketEntry.Hash)
if treeErr != nil {
continue
info := CommittedInfo{
CheckpointID: checkpointID,
}

// Each entry in the bucket is the remaining part of the checkpoint ID
for _, checkpointEntry := range bucketTree.Entries {
if checkpointEntry.Mode != filemode.Dir {
continue
}

checkpointTree, cpTreeErr := s.repo.TreeObject(checkpointEntry.Hash)
if cpTreeErr != nil {
continue
}

// Reconstruct checkpoint ID: <bucket><remaining>
checkpointIDStr := bucketEntry.Name + checkpointEntry.Name
checkpointID, cpIDErr := id.NewCheckpointID(checkpointIDStr)
if cpIDErr != nil {
// Skip invalid checkpoint IDs (shouldn't happen with our own data)
continue
}

info := CommittedInfo{
CheckpointID: checkpointID,
}

// Get details from root metadata file (CheckpointSummary format)
if metadataFile, fileErr := checkpointTree.File(paths.MetadataFileName); fileErr == nil {
if content, contentErr := metadataFile.Contents(); contentErr == nil {
var summary CheckpointSummary
if err := json.Unmarshal([]byte(content), &summary); err == nil {
info.CheckpointsCount = summary.CheckpointsCount
info.FilesTouched = summary.FilesTouched
info.SessionCount = len(summary.Sessions)

// Read session metadata from latest session to get Agent, SessionID, CreatedAt
if len(summary.Sessions) > 0 {
latestIndex := len(summary.Sessions) - 1
latestDir := strconv.Itoa(latestIndex)
if sessionTree, treeErr := checkpointTree.Tree(latestDir); treeErr == nil {
if sessionMetadataFile, smErr := sessionTree.File(paths.MetadataFileName); smErr == nil {
if sessionContent, scErr := sessionMetadataFile.Contents(); scErr == nil {
var sessionMetadata CommittedMetadata
if json.Unmarshal([]byte(sessionContent), &sessionMetadata) == nil {
info.Agent = sessionMetadata.Agent
info.SessionID = sessionMetadata.SessionID
info.CreatedAt = sessionMetadata.CreatedAt
}
// Get details from root metadata file (CheckpointSummary format)
if metadataFile, fileErr := checkpointTree.File(paths.MetadataFileName); fileErr == nil {
if content, contentErr := metadataFile.Contents(); contentErr == nil {
var summary CheckpointSummary
if err := json.Unmarshal([]byte(content), &summary); err == nil {
info.CheckpointsCount = summary.CheckpointsCount
info.FilesTouched = summary.FilesTouched
info.SessionCount = len(summary.Sessions)

// Read session metadata from latest session to get Agent, SessionID, CreatedAt
if len(summary.Sessions) > 0 {
latestIndex := len(summary.Sessions) - 1
latestDir := strconv.Itoa(latestIndex)
if sessionTree, treeErr := checkpointTree.Tree(latestDir); treeErr == nil {
if sessionMetadataFile, smErr := sessionTree.File(paths.MetadataFileName); smErr == nil {
if sessionContent, scErr := sessionMetadataFile.Contents(); scErr == nil {
var sessionMetadata CommittedMetadata
if json.Unmarshal([]byte(sessionContent), &sessionMetadata) == nil {
info.Agent = sessionMetadata.Agent
info.SessionID = sessionMetadata.SessionID
info.CreatedAt = sessionMetadata.CreatedAt
}
}
}
}
}
}
}

checkpoints = append(checkpoints, info)
}
}

checkpoints = append(checkpoints, info)
return nil
})

// Sort by time (most recent first)
sort.Slice(checkpoints, func(i, j int) bool {
Expand Down
Loading
Loading