diff --git a/go.mod b/go.mod index 9f98bf3..b93d932 100644 --- a/go.mod +++ b/go.mod @@ -4,18 +4,19 @@ go 1.25.0 require ( github.com/alecthomas/kong v1.14.0 + github.com/charmbracelet/bubbles v1.0.0 + github.com/charmbracelet/bubbletea v1.3.10 + github.com/charmbracelet/glamour v1.0.0 + github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 golang.org/x/term v0.40.0 + modernc.org/sqlite v1.46.1 ) require ( github.com/alecthomas/chroma/v2 v2.20.0 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect - github.com/charmbracelet/bubbles v1.0.0 // indirect - github.com/charmbracelet/bubbletea v1.3.10 // indirect github.com/charmbracelet/colorprofile v0.4.1 // indirect - github.com/charmbracelet/glamour v1.0.0 // indirect - github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect github.com/charmbracelet/x/ansi v0.11.6 // indirect github.com/charmbracelet/x/cellbuf v0.0.15 // indirect github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect @@ -24,7 +25,9 @@ require ( github.com/clipperhouse/stringish v0.1.1 // indirect github.com/clipperhouse/uax29/v2 v2.5.0 // indirect github.com/dlclark/regexp2 v1.11.5 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/google/uuid v1.6.0 // indirect github.com/gorilla/css v1.0.1 // indirect github.com/lucasb-eyer/go-colorful v1.3.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect @@ -35,11 +38,17 @@ require ( github.com/muesli/cancelreader v0.2.2 // indirect github.com/muesli/reflow v0.3.0 // indirect github.com/muesli/termenv v0.16.0 // indirect + github.com/ncruces/go-strftime v1.0.0 // indirect + github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect 
github.com/yuin/goldmark v1.7.13 // indirect github.com/yuin/goldmark-emoji v1.0.6 // indirect + golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect golang.org/x/net v0.38.0 // indirect golang.org/x/sys v0.41.0 // indirect golang.org/x/text v0.30.0 // indirect + modernc.org/libc v1.67.6 // indirect + modernc.org/mathutil v1.7.1 // indirect + modernc.org/memory v1.11.0 // indirect ) diff --git a/go.sum b/go.sum index bc521fe..d754665 100644 --- a/go.sum +++ b/go.sum @@ -8,32 +8,28 @@ github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/aymanbagabas/go-udiff v0.3.1 h1:LV+qyBQ2pqe0u42ZsUEtPiCaUoqgA9gYRDs3vj1nolY= +github.com/aymanbagabas/go-udiff v0.3.1/go.mod h1:G0fsKmG+P6ylD0r6N/KgQD/nWzgfnl8ZBcNLgcbrw8E= github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/charmbracelet/bubbles v1.0.0 h1:12J8/ak/uCZEMQ6KU7pcfwceyjLlWsDLAxB5fXonfvc= github.com/charmbracelet/bubbles v1.0.0/go.mod h1:9d/Zd5GdnauMI5ivUIVisuEm3ave1XwXtD1ckyV6r3E= github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw= github.com/charmbracelet/bubbletea v1.3.10/go.mod h1:ORQfo0fk8U+po9VaNvnV95UPWA1BitP1E0N6xJPlHr4= -github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs= -github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk= github.com/charmbracelet/colorprofile v0.4.1 h1:a1lO03qTrSIRaK8c3JRxJDZOvhvIeSco3ej+ngLk1kk= github.com/charmbracelet/colorprofile v0.4.1/go.mod 
h1:U1d9Dljmdf9DLegaJ0nGZNJvoXAhayhmidOdcBwAvKk= github.com/charmbracelet/glamour v1.0.0 h1:AWMLOVFHTsysl4WV8T8QgkQ0s/ZNZo7CiE4WKhk8l08= github.com/charmbracelet/glamour v1.0.0/go.mod h1:DSdohgOBkMr2ZQNhw4LZxSGpx3SvpeujNoXrQyH2hxo= github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 h1:ZR7e0ro+SZZiIZD7msJyA+NjkCNNavuiPBLgerbOziE= github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834/go.mod h1:aKC/t2arECF6rNOnaKaVU6y4t4ZeHQzqfxedE/VkVhA= -github.com/charmbracelet/x/ansi v0.10.2 h1:ith2ArZS0CJG30cIUfID1LXN7ZFXRCww6RUvAPA+Pzw= -github.com/charmbracelet/x/ansi v0.10.2/go.mod h1:HbLdJjQH4UH4AqA2HpRWuWNluRE6zxJH/yteYEYCFa8= github.com/charmbracelet/x/ansi v0.11.6 h1:GhV21SiDz/45W9AnV2R61xZMRri5NlLnl6CVF7ihZW8= github.com/charmbracelet/x/ansi v0.11.6/go.mod h1:2JNYLgQUsyqaiLovhU2Rv/pb8r6ydXKS3NIttu3VGZQ= -github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k= -github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs= github.com/charmbracelet/x/cellbuf v0.0.15 h1:ur3pZy0o6z/R7EylET877CBxaiE1Sp1GMxoFPAIztPI= github.com/charmbracelet/x/cellbuf v0.0.15/go.mod h1:J1YVbR7MUuEGIFPCaaZ96KDl5NoS0DAWkskup+mOY+Q= +github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ= +github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U= github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf h1:rLG0Yb6MQSDKdB52aGX55JT1oi0P0Kuaj7wi1bLUpnI= github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf/go.mod h1:B3UgsnsBZS/eX42BlaNiJkD1pPOUa+oF1IYC6Yd2CEU= -github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= -github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= github.com/charmbracelet/x/term v0.2.2 h1:xVRT/S2ZcKdhhOuSP4t5cLi5o+JxklsoEObBSgfgZRk= 
github.com/charmbracelet/x/term v0.2.2/go.mod h1:kF8CY5RddLWrsgVwpw4kAa6TESp6EB5y3uxGLeCqzAI= github.com/clipperhouse/displaywidth v0.9.0 h1:Qb4KOhYwRiN3viMv1v/3cTBlz3AcAZX3+y9OLhMtAtA= @@ -44,10 +40,18 @@ github.com/clipperhouse/uax29/v2 v2.5.0 h1:x7T0T4eTHDONxFJsL94uKNKPHrclyFI0lm7+w github.com/clipperhouse/uax29/v2 v2.5.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g= github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ= github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs= +github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag= @@ -57,8 +61,6 @@ 
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= -github.com/mattn/go-runewidth v0.0.17 h1:78v8ZlW0bP43XfmAfPsdXcoNCelfMHsDmd/pkENfrjQ= -github.com/mattn/go-runewidth v0.0.17/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw= github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= @@ -71,6 +73,10 @@ github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= +github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w= +github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= @@ -81,8 +87,14 @@ github.com/yuin/goldmark v1.7.13 h1:GPddIs617DnBLFFVJFgpo1aBfe/4xcvMc3SB5t/D0pA= github.com/yuin/goldmark v1.7.13/go.mod 
h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg= github.com/yuin/goldmark-emoji v1.0.6 h1:QWfF2FYaXwL74tfGOW5izeiZepUDroDJfWubQI9HTHs= github.com/yuin/goldmark-emoji v1.0.6/go.mod h1:ukxJDKFpdFb5x0a5HqbdlcKtebh086iJpI31LTKmWuA= +golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY= +golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= +golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= @@ -91,3 +103,33 @@ golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= +modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis= +modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0= +modernc.org/ccgo/v4 v4.30.1 h1:4r4U1J6Fhj98NKfSjnPUN7Ze2c6MnAdL0hWw6+LrJpc= +modernc.org/ccgo/v4 v4.30.1/go.mod h1:bIOeI1JL54Utlxn+LwrFyjCx2n2RDiYEaJVSrgdrRfM= +modernc.org/fileutil v1.3.40 
h1:ZGMswMNc9JOCrcrakF1HrvmergNLAmxOPjizirpfqBA= +modernc.org/fileutil v1.3.40/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc= +modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI= +modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito= +modernc.org/gc/v3 v3.1.1 h1:k8T3gkXWY9sEiytKhcgyiZ2L0DTyCQ/nvX+LoCljoRE= +modernc.org/gc/v3 v3.1.1/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY= +modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks= +modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI= +modernc.org/libc v1.67.6 h1:eVOQvpModVLKOdT+LvBPjdQqfrZq+pC39BygcT+E7OI= +modernc.org/libc v1.67.6/go.mod h1:JAhxUVlolfYDErnwiqaLvUqc8nfb2r6S6slAgZOnaiE= +modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= +modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg= +modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= +modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= +modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8= +modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns= +modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w= +modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE= +modernc.org/sqlite v1.46.1 h1:eFJ2ShBLIEnUWlLy12raN0Z1plqmFX9Qe3rjQTKt6sU= +modernc.org/sqlite v1.46.1/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA= +modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0= +modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A= +modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y= +modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= diff --git a/internal/app/app_test.go b/internal/app/app_test.go index 8d482c6..46ad8f0 100644 --- a/internal/app/app_test.go +++ 
b/internal/app/app_test.go @@ -10,6 +10,8 @@ import ( "path/filepath" "strings" "testing" + + "github.com/andyhtran/cct/internal/index" ) // setupFixtures creates a fake ~/.claude tree with session, plan, and changelog @@ -19,6 +21,7 @@ func setupFixtures(t *testing.T) string { t.Helper() home := t.TempDir() t.Setenv("HOME", home) + t.Setenv("XDG_CACHE_HOME", filepath.Join(home, ".cache")) claudeDir := filepath.Join(home, ".claude") projectsDir := filepath.Join(claudeDir, "projects") @@ -152,6 +155,9 @@ func TestSearchCmd_JSON(t *testing.T) { if len(results) == 0 { t.Fatal("expected at least 1 search result") } + if _, ok := results[0]["session"]; !ok { + t.Fatal("expected session field in result") + } } func TestSearchCmd_NoResults(t *testing.T) { @@ -559,6 +565,92 @@ func TestShellQuote(t *testing.T) { } } +func TestFormatSyncResult(t *testing.T) { + tests := []struct { + name string + result *index.SyncResult + want string + }{ + { + "up to date with sessions", + &index.SyncResult{Unchanged: 100}, + "Already up to date (100 sessions)", + }, + { + "up to date empty index", + &index.SyncResult{}, + "Already up to date", + }, + { + "only new", + &index.SyncResult{Added: 3, Unchanged: 97}, + "Synced 3 new (97 unchanged)", + }, + { + "only updated", + &index.SyncResult{Updated: 2, Unchanged: 98}, + "Synced 2 updated (98 unchanged)", + }, + { + "new and updated", + &index.SyncResult{Added: 3, Updated: 2, Unchanged: 95}, + "Synced 3 new, 2 updated (95 unchanged)", + }, + { + "all types", + &index.SyncResult{Added: 1, Updated: 2, Deleted: 3, Unchanged: 94}, + "Synced 1 new, 2 updated, 3 deleted (94 unchanged)", + }, + { + "changes with zero unchanged", + &index.SyncResult{Added: 5}, + "Synced 5 new", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := formatSyncResult(tt.result) + if got != tt.want { + t.Errorf("formatSyncResult() = %q, want %q", got, tt.want) + } + }) + } +} + +func TestSearchCmd_ProjectNotFound(t *testing.T) { + 
setupFixtures(t) + + globals := &Globals{JSON: false} + cmd := &SearchCmd{Query: "database", Project: "nonexistent_xyz"} + + out := captureStdout(t, func() { + if err := cmd.Run(globals); err != nil { + t.Fatal(err) + } + }) + + if !strings.Contains(out, `No project matching "nonexistent_xyz"`) { + t.Errorf("expected 'No project matching' message, got: %q", out) + } +} + +func TestSearchCmd_ProjectExistsNoQueryMatch(t *testing.T) { + setupFixtures(t) + + globals := &Globals{JSON: false} + cmd := &SearchCmd{Query: "zzz_impossible_term_zzz", Project: "myproject"} + + out := captureStdout(t, func() { + if err := cmd.Run(globals); err != nil { + t.Fatal(err) + } + }) + + if !strings.Contains(out, `No sessions matching`) || !strings.Contains(out, `in project "myproject"`) { + t.Errorf("expected 'No sessions matching ... in project' message, got: %q", out) + } +} + func TestExitError(t *testing.T) { err := &ExitError{Code: 42} if err.Error() != "exit status 42" { diff --git a/internal/app/cli.go b/internal/app/cli.go index 2a9915f..99525c8 100644 --- a/internal/app/cli.go +++ b/internal/app/cli.go @@ -31,6 +31,7 @@ type CLI struct { Changelog ChangelogCmd `cmd:"" aliases:"log" help:"Show Claude Code changelog"` VersionInfo VersionCmd `cmd:"" name:"version" help:"Show version information"` Schema SchemaCmd `cmd:"" help:"Show CLI schema as JSON (for tooling)"` + Index IndexCmd `cmd:"" help:"Manage search index"` } type Globals struct { diff --git a/internal/app/index.go b/internal/app/index.go new file mode 100644 index 0000000..0f3765f --- /dev/null +++ b/internal/app/index.go @@ -0,0 +1,145 @@ +package app + +import ( + "encoding/json" + "fmt" + "os" + "strings" + + "github.com/andyhtran/cct/internal/index" + "github.com/andyhtran/cct/internal/output" +) + +type IndexCmd struct { + Sync IndexSyncCmd `cmd:"" help:"Sync index with latest sessions"` + Rebuild IndexRebuildCmd `cmd:"" help:"Rebuild index from scratch"` + Status IndexStatusCmd `cmd:"" help:"Show index status"` 
+} + +type IndexSyncCmd struct { + NoAgents bool `help:"Exclude sub-agent sessions" name:"no-agents"` +} + +func (cmd *IndexSyncCmd) Run(globals *Globals) error { + idx, err := index.Open() + if err != nil { + return fmt.Errorf("open index: %w", err) + } + defer func() { _ = idx.Close() }() + + result, err := idx.SyncWithProgress(!cmd.NoAgents, true, os.Stderr) + if err != nil { + return fmt.Errorf("sync: %w", err) + } + + if globals.JSON { + status, err := idx.Status() + if err != nil { + return fmt.Errorf("status: %w", err) + } + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(status) + } + + fmt.Println(formatSyncResult(result)) + return nil +} + +type IndexRebuildCmd struct { + NoAgents bool `help:"Exclude sub-agent sessions" name:"no-agents"` +} + +func (cmd *IndexRebuildCmd) Run(globals *Globals) error { + idx, err := index.Open() + if err != nil { + return fmt.Errorf("open index: %w", err) + } + defer func() { _ = idx.Close() }() + + result, err := idx.RebuildWithProgress(!cmd.NoAgents, os.Stderr) + if err != nil { + return fmt.Errorf("rebuild: %w", err) + } + + if globals.JSON { + status, err := idx.Status() + if err != nil { + return fmt.Errorf("status: %w", err) + } + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(status) + } + + fmt.Printf("Indexed %d sessions\n", result.Added) + return nil +} + +type IndexStatusCmd struct{} + +func (cmd *IndexStatusCmd) Run(globals *Globals) error { + idx, err := index.Open() + if err != nil { + return fmt.Errorf("open index: %w", err) + } + defer func() { _ = idx.Close() }() + + status, err := idx.Status() + if err != nil { + return fmt.Errorf("status: %w", err) + } + + if globals.JSON { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(status) + } + + fmt.Printf("Index: %s\n", status.Path) + fmt.Printf("Sessions: %d\n", status.TotalSessions) + fmt.Printf("Messages: %d\n", status.TotalMessages) + fmt.Printf("Size: %s\n", 
formatBytes(status.IndexSizeBytes)) + if !status.LastSyncTime.IsZero() { + fmt.Printf("Last sync: %s\n", output.FormatAge(status.LastSyncTime)) + } + return nil +} + +func formatSyncResult(r *index.SyncResult) string { + if r.UpToDate() { + if r.Unchanged > 0 { + return fmt.Sprintf("Already up to date (%d sessions)", r.Unchanged) + } + return "Already up to date" + } + + var parts []string + if r.Added > 0 { + parts = append(parts, fmt.Sprintf("%d new", r.Added)) + } + if r.Updated > 0 { + parts = append(parts, fmt.Sprintf("%d updated", r.Updated)) + } + if r.Deleted > 0 { + parts = append(parts, fmt.Sprintf("%d deleted", r.Deleted)) + } + summary := "Synced " + strings.Join(parts, ", ") + if r.Unchanged > 0 { + summary += fmt.Sprintf(" (%d unchanged)", r.Unchanged) + } + return summary +} + +func formatBytes(b int64) string { + const unit = 1024 + if b < unit { + return fmt.Sprintf("%d B", b) + } + div, exp := int64(unit), 0 + for n := b / unit; n >= unit; n /= unit { + div *= unit + exp++ + } + return fmt.Sprintf("%.1f %cB", float64(b)/float64(div), "KMGTPE"[exp]) +} diff --git a/internal/app/search.go b/internal/app/search.go index b5470d6..757c584 100644 --- a/internal/app/search.go +++ b/internal/app/search.go @@ -4,8 +4,8 @@ import ( "encoding/json" "fmt" "os" - "sort" + "github.com/andyhtran/cct/internal/index" "github.com/andyhtran/cct/internal/output" "github.com/andyhtran/cct/internal/session" ) @@ -26,6 +26,15 @@ func formatMatchRole(m session.Match) string { return output.Dim(tag) + " " + m.Snippet } +func makeSearchTable(query string) *output.Table { + return output.NewTable(query, + output.Fixed("SESSION", 16), + output.Flex("PROJECT", 25, 15), + output.Fixed("AGE", 6), + output.Flex("MATCH", 0, 30), + ) +} + type SearchCmd struct { Query string `arg:"" help:"Search query"` Project string `short:"p" help:"Filter by project name"` @@ -34,84 +43,147 @@ type SearchCmd struct { All bool `short:"a" help:"Show all results"` MaxMatches int `short:"m" 
help:"Max matches per session" default:"3"` Context int `short:"C" help:"Extra context characters for snippets" default:"0"` + Sort string `help:"Sort order: recency (default), relevance" default:"recency" enum:"recency,relevance"` NoAgents bool `help:"Exclude sub-agent sessions" name:"no-agents"` + Sync bool `help:"Force index sync before searching"` } func (cmd *SearchCmd) Run(globals *Globals) error { - tbl := output.NewTable(cmd.Query, - output.Fixed("SESSION", 16), - output.Flex("PROJECT", 25, 15), - output.Fixed("AGE", 6), - output.Flex("MATCH", 0, 30), - ) - - var files []string + // Single-session search mode uses streaming (no index needed) if cmd.Session != "" { - s, err := session.FindByPrefix(cmd.Session) - if err != nil { - return err + return cmd.runSessionSearch(globals) + } + + idx, err := index.Open() + if err != nil { + return fmt.Errorf("open index: %w", err) + } + defer func() { _ = idx.Close() }() + + includeAgents := !cmd.NoAgents + + if cmd.Sync { + if err := idx.ForceSync(includeAgents); err != nil { + return fmt.Errorf("sync: %w", err) } - files = []string{s.FilePath} - } else { - files = session.DiscoverFiles(cmd.Project, !cmd.NoAgents) - if !globals.JSON && len(files) > 50 { - fmt.Fprintf(os.Stderr, "Searching %d sessions...\n", len(files)) + } + + if !globals.JSON { + if status, err := idx.Status(); err == nil && status.TotalSessions == 0 { + fmt.Fprintln(os.Stderr, "Building search index...") } } - results := session.SearchFiles(files, cmd.Query, tbl.LastColWidth()+cmd.Context, cmd.MaxMatches) - sort.Slice(results, func(i, j int) bool { - return results[i].Session.Modified.After(results[j].Session.Modified) - }) + tbl := makeSearchTable(cmd.Query) - if !cmd.All && cmd.Limit > 0 && len(results) > cmd.Limit { - total := len(results) - results = results[:cmd.Limit] - if !globals.JSON { - fmt.Fprintf(os.Stderr, "Showing %d of %d results (use --all or -n to adjust)\n", cmd.Limit, total) - } + limit := cmd.Limit + if cmd.All { + limit = 0 + } 
+ + results, total, err := idx.Search(index.SearchOptions{ + Query: cmd.Query, + ProjectFilter: cmd.Project, + IncludeAgents: includeAgents, + MaxResults: limit, + MaxMatches: cmd.MaxMatches, + SnippetWidth: tbl.LastColWidth() + cmd.Context, + SortBy: cmd.Sort, + }) + if err != nil { + return fmt.Errorf("search: %w", err) } if len(results) == 0 { - fmt.Printf(" No sessions matching %q\n", cmd.Query) + switch { + case cmd.Project != "" && !idx.ProjectExists(cmd.Project): + fmt.Printf(" No project matching %q\n", cmd.Project) + case cmd.Project != "": + fmt.Printf(" No sessions matching %q in project %q\n", cmd.Query, cmd.Project) + default: + fmt.Printf(" No sessions matching %q\n", cmd.Query) + } return nil } + if !globals.JSON && total > len(results) { + fmt.Fprintf(os.Stderr, "Showing %d of %d results (use --all or -n to adjust)\n", len(results), total) + } + if globals.JSON { enc := json.NewEncoder(os.Stdout) enc.SetIndent("", " ") return enc.Encode(results) } - fmt.Printf("\n Found %d session(s) matching %q\n", len(results), cmd.Query) + fmt.Printf("\n Found %d session(s) matching %q\n", total, cmd.Query) fmt.Println() tbl.PrintHeader() + printResults(results, tbl) + fmt.Println() + sessions := make([]*session.Session, 0, len(results)) for _, r := range results { - s := r.Session - projectName := s.ProjectName - if s.IsAgent { - projectName += " (agent)" - } - for i, m := range r.Matches { - display := formatMatchRole(m) - if i == 0 { - tbl.Row( - []string{s.ShortID, output.Truncate(projectName, tbl.ColWidth(1)), output.FormatAge(s.Modified), display}, - []func(string) string{output.Dim, output.Bold, output.Dim, nil}, - ) - } else { - tbl.Continuation(display) - } - } + sessions = append(sessions, r.Session) + } + printResumeHints(sessions) + fmt.Println() + return nil +} + +// runSessionSearch searches within a specific session using streaming (for -s flag) +func (cmd *SearchCmd) runSessionSearch(globals *Globals) error { + s, err := 
session.FindByPrefix(cmd.Session) + if err != nil { + return err + } + + tbl := makeSearchTable(cmd.Query) + results := session.SearchFiles([]string{s.FilePath}, cmd.Query, tbl.LastColWidth()+cmd.Context, cmd.MaxMatches) + + if len(results) == 0 { + fmt.Printf(" No matches for %q in session %s\n", cmd.Query, s.ShortID) + return nil } + if globals.JSON { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(results) + } + + fmt.Printf("\n Found %d match(es) for %q in session %s\n", len(results[0].Matches), cmd.Query, s.ShortID) fmt.Println() - sessions := make([]*session.Session, len(results)) - for i, r := range results { - sessions[i] = r.Session + tbl.PrintHeader() + + for _, r := range results { + printSessionMatches(r.Session, r.Matches, tbl) } - printResumeHints(sessions) + fmt.Println() return nil } + +func printResults(results []index.SearchResult, tbl *output.Table) { + for _, r := range results { + printSessionMatches(r.Session, r.Matches, tbl) + } +} + +func printSessionMatches(s *session.Session, matches []session.Match, tbl *output.Table) { + projectName := s.ProjectName + if s.IsAgent { + projectName += " (agent)" + } + for i, m := range matches { + display := formatMatchRole(m) + if i == 0 { + tbl.Row( + []string{s.ShortID, output.Truncate(projectName, tbl.ColWidth(1)), output.FormatAge(s.Modified), display}, + []func(string) string{output.Dim, output.Bold, output.Dim, nil}, + ) + } else { + tbl.Continuation(display) + } + } +} diff --git a/internal/index/index.go b/internal/index/index.go new file mode 100644 index 0000000..7c87d6b --- /dev/null +++ b/internal/index/index.go @@ -0,0 +1,108 @@ +package index + +import ( + "database/sql" + "fmt" + "os" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/andyhtran/cct/internal/paths" + _ "modernc.org/sqlite" +) + +type Index struct { + db *sql.DB + path string + lastSyncTime time.Time + syncMu sync.Mutex +} + +func Open() (*Index, error) { + dbPath := 
paths.IndexPath() + + if err := os.MkdirAll(filepath.Dir(dbPath), 0o755); err != nil { + return nil, fmt.Errorf("create cache dir: %w", err) + } + + idx, err := openDB(dbPath) + if err != nil { + if !strings.Contains(err.Error(), "ensure schema") { + return nil, err + } + fmt.Fprintln(os.Stderr, "Recreating corrupted search index...") + for _, ext := range []string{"", "-wal", "-shm"} { + _ = os.Remove(dbPath + ext) + } + idx, err = openDB(dbPath) + if err != nil { + return nil, err + } + } + return idx, nil +} + +func openDB(dbPath string) (*Index, error) { + db, err := sql.Open("sqlite", dbPath+"?_pragma=journal_mode(WAL)&_pragma=busy_timeout(30000)") + if err != nil { + return nil, fmt.Errorf("open database: %w", err) + } + + idx := &Index{db: db, path: dbPath} + + if err := idx.ensureSchema(); err != nil { + _ = db.Close() + return nil, fmt.Errorf("ensure schema: %w", err) + } + + return idx, nil +} + +func (idx *Index) Close() error { + return idx.db.Close() +} + +func (idx *Index) Path() string { + return idx.path +} + +type IndexStatus struct { + Path string `json:"path"` + TotalSessions int `json:"total_sessions"` + TotalMessages int `json:"total_messages"` + LastSyncTime time.Time `json:"last_sync_time"` + IndexSizeBytes int64 `json:"index_size_bytes"` +} + +func (idx *Index) Status() (*IndexStatus, error) { + var sessions, messages int + + if err := idx.db.QueryRow("SELECT COUNT(*) FROM sessions").Scan(&sessions); err != nil { + return nil, err + } + + if err := idx.db.QueryRow("SELECT COUNT(*) FROM content_map").Scan(&messages); err != nil { + return nil, err + } + + var size int64 + if info, err := os.Stat(idx.path); err == nil { + size = info.Size() + } + + var lastSync time.Time + var lastSyncStr string + if err := idx.db.QueryRow("SELECT value FROM index_meta WHERE key = 'last_sync_time'").Scan(&lastSyncStr); err == nil { + lastSync, _ = time.Parse(time.RFC3339Nano, lastSyncStr) + } + + return &IndexStatus{ + Path: idx.path, + TotalSessions: 
sessions,
+		TotalMessages:  messages,
+		LastSyncTime:   lastSync,
+		IndexSizeBytes: size,
+	}, nil
+}
diff --git a/internal/index/index_test.go b/internal/index/index_test.go
new file mode 100644
index 0000000..12a1399
--- /dev/null
+++ b/internal/index/index_test.go
@@ -0,0 +1,642 @@
+//go:build darwin || linux
+
+package index
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+	"testing"
+	"time"
+)
+
+// setupTestIndex creates a temp HOME containing one project with a single
+// four-message session, opens the index, force-syncs it, and returns it.
+// Cleanup of the index handle is registered on t.
+func setupTestIndex(t *testing.T) *Index {
+	t.Helper()
+	home := t.TempDir()
+	t.Setenv("HOME", home)
+	t.Setenv("XDG_CACHE_HOME", filepath.Join(home, ".cache"))
+
+	projDir := filepath.Join(home, ".claude", "projects", "-Users-test-myproject")
+	if err := os.MkdirAll(projDir, 0o755); err != nil {
+		t.Fatal(err)
+	}
+
+	sessionLines := []string{
+		`{"type":"user","message":{"role":"user","content":"fix the pre-commit hook and don't forget fmt.Println"},"cwd":"/Users/test/myproject","gitBranch":"main","sessionId":"aaaa1111-2222-3333-4444-555555555555","timestamp":"2026-02-01T08:00:00Z"}`,
+		`{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"I'll fix the pre-commit hook. It doesn't need fmt.Println here."}]},"timestamp":"2026-02-01T08:00:05Z"}`,
+		`{"type":"user","message":{"role":"user","content":"now add tests for the parser"},"timestamp":"2026-02-01T08:01:00Z"}`,
+		`{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"Done, tests added for the parser."}]},"timestamp":"2026-02-01T08:01:05Z"}`,
+	}
+
+	sessionPath := filepath.Join(projDir, "aaaa1111-2222-3333-4444-555555555555.jsonl")
+	f, err := os.Create(sessionPath)
+	if err != nil {
+		t.Fatal(err)
+	}
+	for _, line := range sessionLines {
+		if _, err := fmt.Fprintln(f, line); err != nil {
+			t.Fatal(err)
+		}
+	}
+	_ = f.Close()
+
+	idx, err := Open()
+	if err != nil {
+		t.Fatal(err)
+	}
+	t.Cleanup(func() { _ = idx.Close() })
+
+	if err := idx.ForceSync(true); err != nil {
+		t.Fatal(err)
+	}
+
+	return idx
+}
+
+func TestComputeChanges(t *testing.T) {
+	t.Run("all unchanged", func(t *testing.T) {
+		current := map[string]fileInfo{
+			"/a.jsonl": {modified: time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC), size: 100},
+		}
+		indexed := map[string]indexedFile{
+			"/a.jsonl": {sessionID: "aaa", modifiedAt: time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC), fileSize: 100},
+		}
+		toAdd, toUpdate, toDelete := computeChanges(current, indexed)
+		if len(toAdd) != 0 || len(toUpdate) != 0 || len(toDelete) != 0 {
+			t.Errorf("expected no changes, got add=%d update=%d delete=%d", len(toAdd), len(toUpdate), len(toDelete))
+		}
+	})
+
+	t.Run("new file", func(t *testing.T) {
+		current := map[string]fileInfo{
+			"/a.jsonl": {modified: time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC), size: 100},
+			"/b.jsonl": {modified: time.Date(2026, 1, 2, 0, 0, 0, 0, time.UTC), size: 200},
+		}
+		indexed := map[string]indexedFile{
+			"/a.jsonl": {sessionID: "aaa", modifiedAt: time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC), fileSize: 100},
+		}
+		toAdd, toUpdate, toDelete := computeChanges(current, indexed)
+		if len(toAdd) != 1 || toAdd[0] != "/b.jsonl" {
+			t.Errorf("expected 1 add (/b.jsonl), got %v", toAdd)
+		}
+		if len(toUpdate) != 0 || len(toDelete) != 0 {
+			t.Errorf("expected no updates/deletes, got update=%d delete=%d", len(toUpdate), len(toDelete))
+		}
+	})
+
+	t.Run("modified time triggers update", func(t *testing.T) {
+		current := map[string]fileInfo{
+			"/a.jsonl": {modified: time.Date(2026, 1, 2, 0, 0, 0, 0, time.UTC), size: 100},
+		}
+		indexed := map[string]indexedFile{
+			"/a.jsonl": {sessionID: "aaa", modifiedAt: time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC), fileSize: 100},
+		}
+		_, toUpdate, _ := computeChanges(current, indexed)
+		if len(toUpdate) != 1 {
+			t.Errorf("expected 1 update for modified time, got %d", len(toUpdate))
+		}
+	})
+
+	t.Run("size change triggers update", func(t *testing.T) {
+		ts := time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC)
+		current := map[string]fileInfo{
+			"/a.jsonl": {modified: ts, size: 200},
+		}
+		indexed := map[string]indexedFile{
+			"/a.jsonl": {sessionID: "aaa", modifiedAt: ts, fileSize: 100},
+		}
+		_, toUpdate, _ := computeChanges(current, indexed)
+		if len(toUpdate) != 1 {
+			t.Errorf("expected 1 update for size change, got %d", len(toUpdate))
+		}
+	})
+
+	t.Run("deleted file", func(t *testing.T) {
+		current := map[string]fileInfo{}
+		indexed := map[string]indexedFile{
+			"/a.jsonl": {sessionID: "aaa", modifiedAt: time.Date(2026, 1, 1, 0, 0, 0, 0, time.UTC), fileSize: 100},
+		}
+		_, _, toDelete := computeChanges(current, indexed)
+		if len(toDelete) != 1 {
+			t.Errorf("expected 1 delete, got %d", len(toDelete))
+		}
+	})
+
+	t.Run("same-second timestamps are unchanged", func(t *testing.T) {
+		current := map[string]fileInfo{
+			"/a.jsonl": {modified: time.Date(2026, 1, 1, 12, 0, 0, 0, time.UTC), size: 100},
+		}
+		indexed := map[string]indexedFile{
+			"/a.jsonl": {sessionID: "aaa", modifiedAt: time.Date(2026, 1, 1, 12, 0, 0, 0, time.UTC), fileSize: 100},
+		}
+		toAdd, toUpdate, toDelete := computeChanges(current, indexed)
+		if len(toAdd) != 0 || len(toUpdate) != 0 || len(toDelete) != 0 {
+			t.Error("same-second timestamps should not trigger an update")
+		}
+	})
+}
+
+func TestSyncResult_UpToDate(t *testing.T) {
+	tests := []struct {
+		name   string
+		result SyncResult
+		want   bool
+	}{
+		{"all zero", SyncResult{}, true},
+		{"unchanged only", SyncResult{Unchanged: 100}, true},
+		{"has added", SyncResult{Added: 1, Unchanged: 99}, false},
+		{"has updated", SyncResult{Updated: 1, Unchanged: 99}, false},
+		{"has deleted", SyncResult{Deleted: 1, Unchanged: 99}, false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			if got := tt.result.UpToDate(); got != tt.want {
+				t.Errorf("UpToDate() = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+func TestSyncIncremental(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	// A second sync of an untouched tree must be a no-op.
+	result, err := idx.SyncWithProgress(true, true, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if !result.UpToDate() {
+		t.Errorf("second sync should be up to date, got add=%d update=%d delete=%d",
+			result.Added, result.Updated, result.Deleted)
+	}
+	if result.Unchanged != 1 {
+		t.Errorf("expected 1 unchanged session, got %d", result.Unchanged)
+	}
+
+	// Appending a message must be detected as an update, not an add.
+	home := os.Getenv("HOME")
+	projDir := filepath.Join(home, ".claude", "projects", "-Users-test-myproject")
+	sessionPath := filepath.Join(projDir, "aaaa1111-2222-3333-4444-555555555555.jsonl")
+	f, err := os.OpenFile(sessionPath, os.O_APPEND|os.O_WRONLY, 0o644)
+	if err != nil {
+		t.Fatal(err)
+	}
+	_, _ = fmt.Fprintln(f, `{"type":"user","message":{"role":"user","content":"one more message"},"timestamp":"2026-02-01T08:02:00Z"}`)
+	_ = f.Close()
+
+	result, err = idx.SyncWithProgress(true, true, nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if result.Updated != 1 {
+		t.Errorf("expected 1 updated after file change, got %d", result.Updated)
+	}
+	if result.Added != 0 {
+		t.Errorf("expected 0 added, got %d", result.Added)
+	}
+}
+
+func TestProjectExists(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	if !idx.ProjectExists("myproject") {
+		t.Error("expected ProjectExists to return true for indexed project")
+	}
+	if idx.ProjectExists("nonexistent_project") {
+		t.Error("expected ProjectExists to return false for non-existent project")
+	}
+}
+
+func TestSanitizeFTSTerm(t *testing.T) {
+	tests := []struct {
+		input string
+		want  string
+	}{
+		{"hello", "hello"},
+		{"Hello", "hello"},
+		{"pre-commit", "pre commit"},
+		{"snake_case", "snake case"},
+		{"fmt.Println", "fmt println"},
+		{"don't", "don t"},
+		{"!!!", ""},
+		{"", ""},
+		{"abc123", "abc123"},
+		{"a--b__c..d", "a b c d"},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.input, func(t *testing.T) {
+			got := sanitizeFTSTerm(tt.input)
+			if got != tt.want {
+				t.Errorf("sanitizeFTSTerm(%q) = %q, want %q", tt.input, got, tt.want)
+			}
+		})
+	}
+}
+
+func TestBuildFTSQuery(t *testing.T) {
+	tests := []struct {
+		input string
+		want  string
+	}{
+		{"hello", "hello*"},
+		{"hello world", "hello world*"},
+		{"pre-commit", "pre commit*"},
+		{"don't", "don t*"},
+		{"fmt.Println", "fmt println*"},
+		{"!!!", ""},
+		{"", ""},
+		{" hello ", "hello*"},
+		{"a b", "a b*"},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.input, func(t *testing.T) {
+			got := buildFTSQuery(tt.input)
+			if got != tt.want {
+				t.Errorf("buildFTSQuery(%q) = %q, want %q", tt.input, got, tt.want)
+			}
+		})
+	}
+}
+
+func TestFtsTokens(t *testing.T) {
+	tests := []struct {
+		input string
+		want  []string
+	}{
+		{"hello", []string{"hello"}},
+		{"hello world", []string{"hello", "world"}},
+		{"pre-commit", []string{"pre", "commit"}},
+		{"fmt.Println", []string{"fmt", "println"}},
+		{"!!!", nil},
+		{"", nil},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.input, func(t *testing.T) {
+			got := ftsTokens(tt.input)
+			if len(got) != len(tt.want) {
+				t.Errorf("ftsTokens(%q) = %v, want %v", tt.input, got, tt.want)
+				return
+			}
+			for i := range got {
+				if got[i] != tt.want[i] {
+					t.Errorf("ftsTokens(%q)[%d] = %q, want %q", tt.input, i, got[i], tt.want[i])
+				}
+			}
+		})
+	}
+}
+
+func TestBuildOrQuery(t *testing.T) {
+	tests := []struct {
+		tokens []string
+		want   string
+	}{
+		{[]string{"fix"}, "fix*"},
+		{[]string{"fix", "bug"}, "fix OR bug*"},
+		{[]string{"fmt", "println"}, "fmt OR println*"},
+		{nil, ""},
+	}
+
+	for _, tt := range tests {
+		got := buildOrQuery(tt.tokens)
+		if got != tt.want {
+			t.Errorf("buildOrQuery(%v) = %q, want %q", tt.tokens, got, tt.want)
+		}
+	}
+}
+
+func TestSearch_BasicMatch(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	results, total, err := idx.Search(SearchOptions{
+		Query:         "parser",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) == 0 {
+		t.Fatal("expected at least 1 result for 'parser'")
+	}
+	if total == 0 {
+		t.Fatal("expected total > 0")
+	}
+}
+
+func TestSearch_NoResults(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	results, total, err := idx.Search(SearchOptions{
+		Query:         "zzz_nonexistent_zzz",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) != 0 {
+		t.Fatalf("expected 0 results, got %d", len(results))
+	}
+	if total != 0 {
+		t.Fatalf("expected total 0, got %d", total)
+	}
+}
+
+func TestSearch_Apostrophe(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	results, _, err := idx.Search(SearchOptions{
+		Query:         "doesn't",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatalf("apostrophe query must not crash: %v", err)
+	}
+	_ = results
+}
+
+func TestSearch_HyphenatedTerms(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	results, _, err := idx.Search(SearchOptions{
+		Query:         "pre-commit",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) == 0 {
+		t.Fatal("expected results for 'pre-commit'")
+	}
+}
+
+func TestSearch_DottedTerms(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	results, _, err := idx.Search(SearchOptions{
+		Query:         "fmt.Println",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) == 0 {
+		t.Fatal("expected results for 'fmt.Println'")
+	}
+}
+
+func TestSearch_ProjectFilter(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	results, _, err := idx.Search(SearchOptions{
+		Query:         "parser",
+		ProjectFilter: "myproject",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) == 0 {
+		t.Fatal("expected results with project filter")
+	}
+}
+
+func TestSearch_ProjectFilter_NoMatch(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	results, total, err := idx.Search(SearchOptions{
+		Query:         "parser",
+		ProjectFilter: "nonexistent",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) != 0 {
+		t.Fatalf("expected 0 results for non-matching project filter, got %d", len(results))
+	}
+	if total != 0 {
+		t.Fatalf("expected total 0, got %d", total)
+	}
+}
+
+func TestSearch_CompoundTermFiltering(t *testing.T) {
+	home := t.TempDir()
+	t.Setenv("HOME", home)
+	t.Setenv("XDG_CACHE_HOME", filepath.Join(home, ".cache"))
+
+	projDir := filepath.Join(home, ".claude", "projects", "-Users-test-compound")
+	if err := os.MkdirAll(projDir, 0o755); err != nil {
+		t.Fatal(err)
+	}
+
+	// Session with literal "pre-commit"
+	writeTestSession(t, projDir, "match111-2222-3333-4444-555555555555", []string{
+		`{"type":"user","message":{"role":"user","content":"fix the pre-commit hook"},"cwd":"/Users/test/compound","sessionId":"match111-2222-3333-4444-555555555555","timestamp":"2026-02-01T08:00:00Z"}`,
+		`{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"I'll fix the pre-commit hook."}]},"timestamp":"2026-02-01T08:00:05Z"}`,
+	})
+
+	// Session with "pre" and "commit" separately (should NOT match "pre-commit")
+	writeTestSession(t, projDir, "noise222-2222-3333-4444-555555555555", []string{
+		`{"type":"user","message":{"role":"user","content":"please commit the pre existing changes"},"cwd":"/Users/test/compound","sessionId":"noise222-2222-3333-4444-555555555555","timestamp":"2026-02-01T09:00:00Z"}`,
+		`{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"Done, I will commit those pre existing changes now."}]},"timestamp":"2026-02-01T09:00:05Z"}`,
+	})
+
+	idx, err := Open()
+	if err != nil {
+		t.Fatal(err)
+	}
+	t.Cleanup(func() { _ = idx.Close() })
+
+	if err := idx.ForceSync(true); err != nil {
+		t.Fatal(err)
+	}
+
+	results, _, err := idx.Search(SearchOptions{
+		Query:         "pre-commit",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) != 1 {
+		t.Fatalf("expected 1 result for 'pre-commit', got %d", len(results))
+	}
+	if results[0].Session.ID != "match111-2222-3333-4444-555555555555" {
+		t.Errorf("expected matching session, got %s", results[0].Session.ID)
+	}
+}
+
+// writeTestSession writes the given JSONL lines as a session file named
+// <id>.jsonl inside projDir.
+func writeTestSession(t *testing.T, projDir, id string, lines []string) {
+	t.Helper()
+	path := filepath.Join(projDir, id+".jsonl")
+	f, err := os.Create(path)
+	if err != nil {
+		t.Fatal(err)
+	}
+	for _, line := range lines {
+		if _, err := fmt.Fprintln(f, line); err != nil {
+			t.Fatal(err)
+		}
+	}
+	_ = f.Close()
+}
+
+func TestSearch_CrossMessageMultiTerm(t *testing.T) {
+	home := t.TempDir()
+	t.Setenv("HOME", home)
+	t.Setenv("XDG_CACHE_HOME", filepath.Join(home, ".cache"))
+
+	projDir := filepath.Join(home, ".claude", "projects", "-Users-test-cross")
+	if err := os.MkdirAll(projDir, 0o755); err != nil {
+		t.Fatal(err)
+	}
+
+	// Session where "fix" and "bug" are in different messages
+	writeTestSession(t, projDir, "cross111-2222-3333-4444-555555555555", []string{
+		`{"type":"user","message":{"role":"user","content":"please fix the login page"},"cwd":"/Users/test/cross","sessionId":"cross111-2222-3333-4444-555555555555","timestamp":"2026-02-01T08:00:00Z"}`,
+		`{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"Found a bug in the auth handler, fixing now."}]},"timestamp":"2026-02-01T08:00:05Z"}`,
+	})
+
+	// Session where neither term appears
+	writeTestSession(t, projDir, "unrel222-2222-3333-4444-555555555555", []string{
+		`{"type":"user","message":{"role":"user","content":"add documentation for the API"},"cwd":"/Users/test/cross","sessionId":"unrel222-2222-3333-4444-555555555555","timestamp":"2026-02-01T09:00:00Z"}`,
+		`{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"Documentation added."}]},"timestamp":"2026-02-01T09:00:05Z"}`,
+	})
+
+	idx, err := Open()
+	if err != nil {
+		t.Fatal(err)
+	}
+	t.Cleanup(func() { _ = idx.Close() })
+
+	if err := idx.ForceSync(true); err != nil {
+		t.Fatal(err)
+	}
+
+	results, _, err := idx.Search(SearchOptions{
+		Query:         "fix bug",
+		IncludeAgents: true,
+		MaxResults:    10,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) != 1 {
+		t.Fatalf("expected 1 result for cross-message 'fix bug', got %d", len(results))
+	}
+	if results[0].Session.ID != "cross111-2222-3333-4444-555555555555" {
+		t.Errorf("expected cross-message session, got %s", results[0].Session.ID)
+	}
+}
+
+func TestSearch_TotalCount(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	results, total, err := idx.Search(SearchOptions{
+		Query:         "fix",
+		IncludeAgents: true,
+		MaxResults:    1,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	if len(results) > 1 {
+		t.Fatalf("expected at most 1 result, got %d", len(results))
+	}
+	if total < len(results) {
+		t.Fatalf("total (%d) must be >= len(results) (%d)", total, len(results))
+	}
+}
+
+func TestOpen_CorruptionRecovery(t *testing.T) {
+	home := t.TempDir()
+	t.Setenv("HOME", home)
+	t.Setenv("XDG_CACHE_HOME", filepath.Join(home, ".cache"))
+
+	projDir := filepath.Join(home, ".claude", "projects", "-Users-test-p")
+	cacheDir := filepath.Join(home, ".cache", "cct")
+	for _, d := range []string{projDir, cacheDir} {
+		if err := os.MkdirAll(d, 0o755); err != nil {
+			t.Fatal(err)
+		}
+	}
+
+	dbPath := filepath.Join(cacheDir, "index.db")
+	if err := os.WriteFile(dbPath, []byte("corrupt garbage data"), 0o644); err != nil {
+		t.Fatal(err)
+	}
+
+	idx, err := Open()
if err != nil {
+		t.Fatalf("Open() should recover from corruption, got: %v", err)
+	}
+	defer func() { _ = idx.Close() }()
+
+	var version int
+	if err := idx.db.QueryRow("PRAGMA user_version").Scan(&version); err != nil {
+		t.Fatal(err)
+	}
+	if version != 6 {
+		t.Errorf("expected schema version 6 after recovery, got %d", version)
+	}
+}
+
+func TestEnsureSchema_FreshDB(t *testing.T) {
+	home := t.TempDir()
+	t.Setenv("HOME", home)
+	t.Setenv("XDG_CACHE_HOME", filepath.Join(home, ".cache"))
+
+	projDir := filepath.Join(home, ".claude", "projects", "-Users-test-p")
+	if err := os.MkdirAll(projDir, 0o755); err != nil {
+		t.Fatal(err)
+	}
+
+	idx, err := Open()
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer func() { _ = idx.Close() }()
+
+	var version int
+	if err := idx.db.QueryRow("PRAGMA user_version").Scan(&version); err != nil {
+		t.Fatal(err)
+	}
+	if version != 6 {
+		t.Errorf("expected schema version 6, got %d", version)
+	}
+
+	var count int
+	if err := idx.db.QueryRow("SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='sessions'").Scan(&count); err != nil {
+		t.Fatal(err)
+	}
+	if count != 1 {
+		t.Error("sessions table not created")
+	}
+}
+
+func TestRebuildWithProgress(t *testing.T) {
+	idx := setupTestIndex(t)
+
+	var buf strings.Builder
+	if _, err := idx.RebuildWithProgress(true, &buf); err != nil {
+		t.Fatal(err)
+	}
+
+	status, err := idx.Status()
+	if err != nil {
+		t.Fatal(err)
+	}
+	if status.TotalSessions == 0 {
+		t.Error("expected sessions after rebuild")
+	}
+}
diff --git a/internal/index/lock.go b/internal/index/lock.go
new file mode 100644
index 0000000..36b5efd
--- /dev/null
+++ b/internal/index/lock.go
@@ -0,0 +1,50 @@
+package index
+
+import (
+	"fmt"
+	"os"
+	"syscall"
+	"time"
+)
+
+const lockTimeout = 5 * time.Second
+
+// fileLock holds an exclusive flock(2) on a lock file.
+type fileLock struct {
+	f *os.File
+}
+
+// acquireLock takes an exclusive flock on path, waiting up to lockTimeout.
+// On timeout the blocked goroutine — not the caller — owns cleanup of the
+// file descriptor: closing f while Flock is still blocked on f.Fd() would
+// free the fd number for reuse, and the pending flock could then act on an
+// unrelated file. It would also leak the lock if Flock succeeded just after
+// the timeout fired.
+func acquireLock(path string) (*fileLock, error) {
+	f, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, 0o644)
+	if err != nil {
+		return nil, fmt.Errorf("open lock file: %w", err)
+	}
+
+	// Fast path: uncontended lock.
+	if err := syscall.Flock(int(f.Fd()), syscall.LOCK_EX|syscall.LOCK_NB); err == nil {
+		return &fileLock{f: f}, nil
+	}
+
+	type result struct{ err error }
+	ch := make(chan result, 1)
+	timedOut := make(chan struct{})
+	go func() {
+		err := syscall.Flock(int(f.Fd()), syscall.LOCK_EX)
+		select {
+		case <-timedOut:
+			// Caller already gave up: release anything we won and close
+			// the descriptor here, where no one else can still be using it.
+			if err == nil {
+				_ = syscall.Flock(int(f.Fd()), syscall.LOCK_UN)
+			}
+			_ = f.Close()
+		default:
+			ch <- result{err}
+		}
+	}()
+
+	select {
+	case r := <-ch:
+		if r.err != nil {
+			_ = f.Close()
+			return nil, fmt.Errorf("acquire lock: %w", r.err)
+		}
+		return &fileLock{f: f}, nil
+	case <-time.After(lockTimeout):
+		close(timedOut)
+		// The goroutine may have raced us and already sent its result;
+		// if so, clean up on its behalf.
+		select {
+		case r := <-ch:
+			if r.err == nil {
+				_ = syscall.Flock(int(f.Fd()), syscall.LOCK_UN)
+			}
+			_ = f.Close()
+		default:
+		}
+		return nil, fmt.Errorf("timeout waiting for index lock (another sync may be running)")
+	}
+}
+
+// release drops the flock and closes the lock file.
+func (l *fileLock) release() {
+	if l.f != nil {
+		_ = syscall.Flock(int(l.f.Fd()), syscall.LOCK_UN)
+		_ = l.f.Close()
+	}
+}
diff --git a/internal/index/schema.go b/internal/index/schema.go
new file mode 100644
index 0000000..7c81300
--- /dev/null
+++ b/internal/index/schema.go
@@ -0,0 +1,190 @@
+package index
+
+import "database/sql"
+
+const schemaSQL = `
+CREATE TABLE IF NOT EXISTS sessions (
+    id TEXT PRIMARY KEY,
+    file_path TEXT NOT NULL UNIQUE,
+    project_dir TEXT NOT NULL,
+    project_name TEXT NOT NULL,
+    project_path TEXT NOT NULL,
+    is_agent INTEGER NOT NULL DEFAULT 0,
+    modified_at TEXT NOT NULL,
+    file_size INTEGER NOT NULL,
+    first_prompt TEXT,
+    created_at TEXT,
+    git_branch TEXT,
+    message_count INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project_dir);
+CREATE INDEX IF NOT EXISTS idx_sessions_modified ON sessions(modified_at DESC);
+
+CREATE TABLE IF NOT EXISTS content_map (
+    rowid INTEGER PRIMARY KEY,
+    session_id TEXT NOT NULL,
+    role TEXT NOT NULL,
+    source TEXT,
+    byte_offset INTEGER NOT NULL,
+    byte_length INTEGER NOT NULL
+);
+
+CREATE VIRTUAL TABLE IF NOT EXISTS content_fts USING fts5(
+    text,
+    content='',
+    contentless_delete=1,
+    tokenize='porter unicode61'
+);
+
+CREATE INDEX IF NOT EXISTS idx_content_map_session ON content_map(session_id);
+
+CREATE TABLE IF NOT EXISTS index_meta (
+    key TEXT PRIMARY KEY,
value TEXT NOT NULL
+);
+`
+
+// ensureSchema creates or migrates the index schema. Versions below 5 get
+// their content tables rebuilt from scratch; version 5 → 6 adds session
+// metadata columns. The target schema version is 6.
+func (idx *Index) ensureSchema() error {
+	var version int
+	err := idx.db.QueryRow("PRAGMA user_version").Scan(&version)
+	if err != nil {
+		return err
+	}
+
+	if version == 0 {
+		// Fresh database: create everything and stamp the current version.
+		if _, err := idx.db.Exec(schemaSQL); err != nil {
+			return err
+		}
+		if _, err := idx.db.Exec("PRAGMA user_version = 6"); err != nil {
+			return err
+		}
+		return nil
+	}
+
+	if version < 5 {
+		// Pre-5 content layout is incompatible: drop and rebuild, forcing a
+		// full resync by clearing last_sync_time.
+		tx, err := idx.db.Begin()
+		if err != nil {
+			return err
+		}
+		defer func() { _ = tx.Rollback() }()
+
+		for _, stmt := range []string{
+			"DROP TABLE IF EXISTS content_fts",
+			"DROP TABLE IF EXISTS content_raw",
+			"DROP TABLE IF EXISTS content_map",
+			"DELETE FROM sessions",
+		} {
+			if _, err := tx.Exec(stmt); err != nil {
+				return err
+			}
+		}
+		if _, err := tx.Exec(`
+			CREATE TABLE content_map (
+				rowid INTEGER PRIMARY KEY,
+				session_id TEXT NOT NULL,
+				role TEXT NOT NULL,
+				source TEXT,
+				byte_offset INTEGER NOT NULL,
+				byte_length INTEGER NOT NULL
+			)
+		`); err != nil {
+			return err
+		}
+		if _, err := tx.Exec("CREATE INDEX idx_content_map_session ON content_map(session_id)"); err != nil {
+			return err
+		}
+		if _, err := tx.Exec(`
+			CREATE TABLE IF NOT EXISTS index_meta (
+				key TEXT PRIMARY KEY,
+				value TEXT NOT NULL
+			)
+		`); err != nil {
+			return err
+		}
+		if _, err := tx.Exec("DELETE FROM index_meta WHERE key = 'last_sync_time'"); err != nil {
+			return err
+		}
+		if err := tx.Commit(); err != nil {
+			return err
+		}
+
+		// Virtual table creation and VACUUM happen outside the transaction.
+		if _, err := idx.db.Exec(`
+			CREATE VIRTUAL TABLE content_fts USING fts5(
+				text,
+				content='',
+				contentless_delete=1,
+				tokenize='porter unicode61'
+			)
+		`); err != nil {
+			return err
+		}
+		if _, err := idx.db.Exec("VACUUM"); err != nil {
+			return err
+		}
+		if _, err := idx.db.Exec("PRAGMA user_version = 5"); err != nil {
+			return err
+		}
+		version = 5
+	}
+
+	if version < 6 {
+		tx, err := idx.db.Begin()
+		if err != nil {
+			return err
+		}
+		defer func() { _ = tx.Rollback() }()
+
+		// ALTER errors are deliberately ignored: the column may already
+		// exist from a partially-applied migration.
+		for _, col := range []string{
+			"ALTER TABLE sessions ADD COLUMN first_prompt TEXT",
+			"ALTER TABLE sessions ADD COLUMN created_at TEXT",
+			"ALTER TABLE sessions ADD COLUMN git_branch TEXT",
+			"ALTER TABLE sessions ADD COLUMN message_count INTEGER NOT NULL DEFAULT 0",
+		} {
+			_, _ = tx.Exec(col)
+		}
+		if _, err := tx.Exec("DELETE FROM index_meta WHERE key = 'last_sync_time'"); err != nil {
+			return err
+		}
+		if err := tx.Commit(); err != nil {
+			return err
+		}
+		if _, err := idx.db.Exec("PRAGMA user_version = 6"); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// deleteSessionData removes a session's rows from content_fts, content_map,
+// and sessions inside tx.
+func (idx *Index) deleteSessionData(tx *sql.Tx, sessionID string) error {
+	// For contentless_delete=1 tables, use standard DELETE syntax
+	rows, err := tx.Query("SELECT rowid FROM content_map WHERE session_id = ?", sessionID)
+	if err != nil {
+		return err
+	}
+	var rowIDs []int64
+	for rows.Next() {
+		var id int64
+		if err := rows.Scan(&id); err != nil {
+			_ = rows.Close()
+			return err
+		}
+		rowIDs = append(rowIDs, id)
+	}
+	_ = rows.Close()
+	// Fix: previously the iteration error was never checked, so a failure
+	// mid-scan produced a truncated rowID list and silently left stale
+	// rows behind in content_fts.
+	if err := rows.Err(); err != nil {
+		return err
+	}
+
+	for _, rowID := range rowIDs {
+		if _, err := tx.Exec("DELETE FROM content_fts WHERE rowid = ?", rowID); err != nil {
+			return err
+		}
+	}
+
+	if _, err := tx.Exec("DELETE FROM content_map WHERE session_id = ?", sessionID); err != nil {
+		return err
+	}
+	if _, err := tx.Exec("DELETE FROM sessions WHERE id = ?", sessionID); err != nil {
+		return err
+	}
+	return nil
+}
diff --git a/internal/index/search.go b/internal/index/search.go
new file mode 100644
index 0000000..2dc5de4
--- /dev/null
+++ b/internal/index/search.go
@@ -0,0 +1,553 @@
+package index
+
+import (
+	"database/sql"
+	"encoding/json"
+	"fmt"
+	"io"
+	"os"
+	"strings"
+	"time"
+
+	"github.com/andyhtran/cct/internal/output"
+	"github.com/andyhtran/cct/internal/session"
+)
+
+// snippetLocation points at one indexed message on disk.
+type snippetLocation struct {
+	sessionID  string
+	filePath   string
+	role       string
+	source     string
+	byteOffset int64
+	byteLength int
+}
+
+type SearchOptions struct {
+	Query         string
+	ProjectFilter string
+	IncludeAgents bool
+	MaxResults    int
+	MaxMatches    int
+	SnippetWidth  int
+	SortBy
string // "recency" (default) or "relevance"
+}
+
+type SearchResult struct {
+	Session *session.Session `json:"session"`
+	Matches []session.Match  `json:"matches"`
+	Score   float64          `json:"score"`
+}
+
+// sessionInfo pairs a hydrated session with its match score.
+type sessionInfo struct {
+	sess  *session.Session
+	score float64
+}
+
+// Search syncs the index (best effort) and runs an FTS query, falling back
+// to a substring scan for punctuation-heavy queries that FTS tokenization
+// cannot represent.
+func (idx *Index) Search(opts SearchOptions) ([]SearchResult, int, error) {
+	// Sync failure is non-fatal: search stale data rather than failing entirely.
+	if err := idx.Sync(opts.IncludeAgents); err != nil {
+		fmt.Fprintf(os.Stderr, "Warning: index sync failed: %v\n", err)
+	}
+
+	results, total, err := idx.ftsSearch(opts)
+	if err != nil {
+		return nil, 0, err
+	}
+
+	// FTS5 tokenizes on punctuation, so compound terms like "pre-commit" or
+	// "fmt.Println" may return zero FTS hits. Fall back to substring scan
+	// for these cases. Only triggers on single-word or compound queries to
+	// avoid slow full-scan on multi-word natural language queries.
+	if len(results) == 0 && (isSingleWord(opts.Query) || isCompoundQuery(opts.Query)) {
+		results = idx.substringSearch(opts)
+		total = len(results)
+	}
+
+	return results, total, nil
+}
+
+// ProjectExists reports whether any indexed session's project directory
+// contains name (case-insensitive substring match).
+func (idx *Index) ProjectExists(name string) bool {
+	var count int
+	_ = idx.db.QueryRow(
+		"SELECT COUNT(*) FROM sessions WHERE LOWER(project_dir) LIKE '%' || LOWER(?) || '%'",
+		name,
+	).Scan(&count)
+	return count > 0
+}
+
+func isSingleWord(query string) bool {
+	return len(strings.Fields(query)) == 1
+}
+
+func isCompoundQuery(query string) bool {
+	return strings.ContainsAny(query, ".-_")
+}
+
+// compoundTerms returns the lowercased whitespace-separated terms of query
+// that contain punctuation (".", "-", "_").
+func compoundTerms(query string) []string {
+	var terms []string
+	for _, term := range strings.Fields(query) {
+		if strings.ContainsAny(term, ".-_") {
+			terms = append(terms, strings.ToLower(term))
+		}
+	}
+	return terms
+}
+
+// substringSearch scans session files directly for a literal substring.
+// Used as the fallback when FTS cannot express the query.
+func (idx *Index) substringSearch(opts SearchOptions) []SearchResult {
+	toSearch := session.DiscoverFiles(opts.ProjectFilter, opts.IncludeAgents)
+	if len(toSearch) == 0 {
+		return nil
+	}
+
+	width := opts.SnippetWidth
+	if width <= 0 {
+		width = 80
+	}
+	perSession := opts.MaxMatches
+	if perSession <= 0 {
+		perSession = 3
+	}
+	cap := opts.MaxResults
+	if cap <= 0 {
+		cap = 25
+	}
+
+	out := make([]SearchResult, 0)
+	for _, sr := range session.SearchFiles(toSearch, opts.Query, width, perSession) {
+		if sr == nil || len(sr.Matches) == 0 {
+			continue
+		}
+		if len(out) >= cap {
+			break
+		}
+		out = append(out, SearchResult{
+			Session: sr.Session,
+			Matches: sr.Matches,
+			Score:   float64(len(sr.Matches)),
+		})
+	}
+	return out
+}
+
+// ftsSearch runs the indexed FTS5 query. Multi-term queries use a per-term
+// INTERSECT so that terms may appear in different messages of one session.
+func (idx *Index) ftsSearch(opts SearchOptions) ([]SearchResult, int, error) {
+	tokens := ftsTokens(opts.Query)
+	if len(tokens) == 0 {
+		return nil, 0, nil
+	}
+
+	limit := opts.MaxResults
+
+	compounds := compoundTerms(opts.Query)
+	projectFilter := strings.ToLower(opts.ProjectFilter)
+	multiTerm := len(tokens) > 1
+
+	// ftsLimit controls the SQL LIMIT clause. 0 means no limit.
+	// For compound queries, we need all FTS candidates since post-filtering
+	// may discard most of them.
+	ftsLimit := limit
+	if len(compounds) > 0 || limit <= 0 {
+		ftsLimit = 0
+	}
+
+	orderBy := "s.modified_at DESC"
+	if opts.SortBy == "relevance" {
+		orderBy = "m.match_count DESC, s.modified_at DESC"
+	}
+
+	var totalMatched int
+	var sessionIDs []string
+	var sessions map[string]sessionInfo
+
+	if multiTerm {
+		intersectSQL, intersectArgs := buildIntersectSQL(tokens)
+		orQuery := buildOrQuery(tokens)
+
+		countQuery := `
+			WITH session_pool AS (` + intersectSQL + `)
+			SELECT COUNT(*) FROM session_pool sp
+			JOIN sessions s ON sp.session_id = s.id
+			WHERE (? = 1 OR s.is_agent = 0)
+			  AND (? = '' OR LOWER(s.project_dir) LIKE '%' || ? || '%')
+		`
+		countArgs := make([]any, 0, len(intersectArgs)+3)
+		countArgs = append(countArgs, intersectArgs...)
+		countArgs = append(countArgs, boolToInt(opts.IncludeAgents), projectFilter, projectFilter)
+		_ = idx.db.QueryRow(countQuery, countArgs...).Scan(&totalMatched)
+
+		mainQuery := `
+			WITH session_pool AS (` + intersectSQL + `),
+			matches AS (
+				SELECT sp.session_id, COUNT(*) as match_count
+				FROM session_pool sp
+				JOIN content_map m ON sp.session_id = m.session_id
+				WHERE m.rowid IN (SELECT rowid FROM content_fts WHERE content_fts MATCH ?)
+				GROUP BY sp.session_id
+			)
+			SELECT
+				s.id, s.file_path, s.project_name, s.project_path,
+				s.is_agent, s.modified_at,
+				s.first_prompt, s.created_at, s.git_branch, s.message_count,
+				m.match_count
+			FROM sessions s
+			JOIN matches m ON s.id = m.session_id
+			WHERE (? = 1 OR s.is_agent = 0)
+			  AND (? = '' OR LOWER(s.project_dir) LIKE '%' || ? || '%')
+			ORDER BY ` + orderBy + limitClause(ftsLimit) + `
+		`
+		mainArgs := make([]any, 0, len(intersectArgs)+5)
+		mainArgs = append(mainArgs, intersectArgs...)
+		mainArgs = append(mainArgs, orQuery, boolToInt(opts.IncludeAgents), projectFilter, projectFilter)
+		mainArgs = appendLimit(mainArgs, ftsLimit)
+
+		var err error
+		sessionIDs, sessions, err = idx.scanSessionRows(mainQuery, mainArgs)
+		if err != nil {
+			return nil, 0, err
+		}
+	} else {
+		ftsQuery := tokens[0] + "*"
+
+		countQuery := `
+			SELECT COUNT(DISTINCT m.session_id)
+			FROM content_fts f
+			JOIN content_map m ON f.rowid = m.rowid
+			JOIN sessions s ON m.session_id = s.id
+			WHERE content_fts MATCH ?
+			  AND (? = 1 OR s.is_agent = 0)
+			  AND (? = '' OR LOWER(s.project_dir) LIKE '%' || ? || '%')
+		`
+		_ = idx.db.QueryRow(countQuery, ftsQuery, boolToInt(opts.IncludeAgents), projectFilter, projectFilter).Scan(&totalMatched)
+
+		mainQuery := `
+			WITH matches AS (
+				SELECT m.session_id, COUNT(*) as match_count
+				FROM content_fts f
+				JOIN content_map m ON f.rowid = m.rowid
+				WHERE content_fts MATCH ?
+				GROUP BY m.session_id
+			)
+			SELECT
+				s.id, s.file_path, s.project_name, s.project_path,
+				s.is_agent, s.modified_at,
+				s.first_prompt, s.created_at, s.git_branch, s.message_count,
+				m.match_count
+			FROM sessions s
+			JOIN matches m ON s.id = m.session_id
+			WHERE (? = 1 OR s.is_agent = 0)
+			  AND (? = '' OR LOWER(s.project_dir) LIKE '%' || ? || '%')
+			ORDER BY ` + orderBy + limitClause(ftsLimit) + `
+		`
+
+		mainArgs := []any{ftsQuery, boolToInt(opts.IncludeAgents), projectFilter, projectFilter}
+		mainArgs = appendLimit(mainArgs, ftsLimit)
+
+		var err error
+		sessionIDs, sessions, err = idx.scanSessionRows(mainQuery, mainArgs)
+		if err != nil {
+			return nil, 0, err
+		}
+	}
+
+	if len(sessionIDs) == 0 {
+		return nil, 0, nil
+	}
+
+	maxMatches := opts.MaxMatches
+	if maxMatches <= 0 {
+		maxMatches = 3
+	}
+	snippetWidth := opts.SnippetWidth
+	if snippetWidth <= 0 {
+		snippetWidth = 80
+	}
+
+	snippetQuery := buildFTSQuery(opts.Query)
+	if multiTerm {
+		snippetQuery = buildOrQuery(tokens)
+	}
+
+	snippetMap := idx.batchGetSnippets(sessionIDs, snippetQuery, maxMatches, snippetWidth, opts.Query)
+
+	results := make([]SearchResult, 0, len(sessionIDs))
+	for _, id := range sessionIDs {
+		matches := snippetMap[id]
+		// Compound queries require a literal hit in at least one snippet.
+		if len(matches) == 0 && len(compounds) > 0 {
+			continue
+		}
+		info := sessions[id]
+		results = append(results, SearchResult{
+			Session: info.sess,
+			Matches: matches,
+			Score:   info.score,
+		})
+	}
+
+	if len(compounds) > 0 {
+		totalMatched = len(results)
+	}
+
+	if limit > 0 && len(results) > limit {
+		results = results[:limit]
+	}
+
+	return results, totalMatched, nil
+}
+
+// scanSessionRows runs query and hydrates one sessionInfo per row, returning
+// IDs in result order alongside the lookup map.
+func (idx *Index) scanSessionRows(query string, args []any) ([]string, map[string]sessionInfo, error) {
+	rows, err := idx.db.Query(query, args...)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var sessionIDs []string
+	sessions := make(map[string]sessionInfo)
+
+	for rows.Next() {
+		var id, filePath, projectName, projectPath, modifiedStr string
+		var firstPrompt, createdAtStr, gitBranch sql.NullString
+		var isAgent, messageCount, matchCount int
+
+		if err := rows.Scan(&id, &filePath, &projectName, &projectPath, &isAgent, &modifiedStr,
+			&firstPrompt, &createdAtStr, &gitBranch, &messageCount, &matchCount); err != nil {
+			_ = rows.Close()
+			return nil, nil, err
+		}
+
+		// Timestamps are stored as RFC3339 text; parse failures yield zero times.
+		modified, _ := time.Parse(time.RFC3339, modifiedStr)
+		var created time.Time
+		if createdAtStr.Valid {
+			created, _ = time.Parse(time.RFC3339, createdAtStr.String)
+		}
+
+		sess := &session.Session{
+			ID:           id,
+			ShortID:      session.ShortID(id),
+			IsAgent:      isAgent == 1,
+			ProjectPath:  projectPath,
+			ProjectName:  projectName,
+			FilePath:     filePath,
+			Modified:     modified,
+			FirstPrompt:  firstPrompt.String,
+			Created:      created,
+			GitBranch:    gitBranch.String,
+			MessageCount: messageCount,
+		}
+
+		sessionIDs = append(sessionIDs, id)
+		sessions[id] = sessionInfo{
+			sess:  sess,
+			score: float64(matchCount),
+		}
+	}
+	if err := rows.Close(); err != nil {
+		return nil, nil, err
+	}
+	if err := rows.Err(); err != nil {
+		return nil, nil, err
+	}
+
+	return sessionIDs, sessions, nil
+}
+
+// batchGetSnippets loads up to maxPerSession matching snippets for each
+// session in a single FTS query, reading the message text back from the
+// original session files on disk. Errors are swallowed: snippets are
+// best-effort decoration on search results.
+func (idx *Index) batchGetSnippets(sessionIDs []string, ftsQuery string, maxPerSession, width int, originalQuery string) map[string][]session.Match {
+	if len(sessionIDs) == 0 {
+		return nil
+	}
+
+	placeholders := make([]string, len(sessionIDs))
+	args := make([]any, len(sessionIDs)+1)
+	for i, id := range sessionIDs {
+		placeholders[i] = "?"
+		args[i] = id
+	}
+	args[len(sessionIDs)] = ftsQuery
+
+	query := `
+		SELECT m.session_id, s.file_path, m.role, m.source, m.byte_offset, m.byte_length
+		FROM content_map m
+		JOIN sessions s ON m.session_id = s.id
+		WHERE m.session_id IN (` + strings.Join(placeholders, ",") + `)
+		  AND m.rowid IN (SELECT rowid FROM content_fts WHERE content_fts MATCH ?)
+		ORDER BY m.session_id, m.rowid
+	`
+
+	rows, err := idx.db.Query(query, args...)
+	if err != nil {
+		return nil
+	}
+	defer func() { _ = rows.Close() }()
+
+	var locations []snippetLocation
+	for rows.Next() {
+		var loc snippetLocation
+		if err := rows.Scan(&loc.sessionID, &loc.filePath, &loc.role, &loc.source, &loc.byteOffset, &loc.byteLength); err != nil {
+			continue
+		}
+		locations = append(locations, loc)
+	}
+	if err := rows.Err(); err != nil {
+		return nil
+	}
+
+	queryTerms := strings.Fields(strings.ToLower(originalQuery))
+	firstTerm := ""
+	if len(queryTerms) > 0 {
+		firstTerm = queryTerms[0]
+	}
+	compounds := compoundTerms(originalQuery)
+
+	// Group locations per file so each session file is opened once.
+	byFile := make(map[string][]snippetLocation)
+	for _, loc := range locations {
+		byFile[loc.filePath] = append(byFile[loc.filePath], loc)
+	}
+
+	result := make(map[string][]session.Match)
+	for filePath, fileLocs := range byFile {
+		f, err := os.Open(filePath)
+		if err != nil {
+			continue
+		}
+		for _, loc := range fileLocs {
+			if len(result[loc.sessionID]) >= maxPerSession {
+				continue
+			}
+
+			text, err := readTextAt(f, loc.byteOffset, loc.byteLength)
+			if err != nil {
+				continue
+			}
+
+			// Compound queries additionally require a literal substring hit.
+			if len(compounds) > 0 {
+				textLower := strings.ToLower(text)
+				found := false
+				for _, ct := range compounds {
+					if strings.Contains(textLower, ct) {
+						found = true
+						break
+					}
+				}
+				if !found {
+					continue
+				}
+			}
+
+			snippet := output.ExtractSnippet(text, firstTerm, width)
+			result[loc.sessionID] = append(result[loc.sessionID], session.Match{
+				Role:    loc.role,
+				Source:  loc.source,
+				Snippet: snippet,
+			})
+		}
+		_ = f.Close()
+	}
+
+	return result
+}
+
+// readTextAt reads a JSONL record of the given length at offset and returns
+// its extracted prompt text. A short read is tolerated as long as at least
+// one byte arrived (the partial buffer may still parse).
+func readTextAt(f *os.File, offset int64, length int) (string, error) {
+	if _, err := f.Seek(offset, io.SeekStart); err != nil {
+		return "", err
+	}
+	buf := make([]byte, length)
+	n, err := io.ReadFull(f, buf)
+	if err != nil && n == 0 {
+		return "", err
+	}
+	var obj map[string]any
+	if err := json.Unmarshal(buf[:n], &obj); err != nil {
+		return "", err
+	}
+	return session.ExtractPromptText(obj), nil
+}
+
+// ftsTokens returns the individual sanitized FTS tokens for a query.
+func ftsTokens(query string) []string {
+	query = strings.TrimSpace(query)
+	if query == "" {
+		return nil
+	}
+
+	var allTokens []string
+	for _, term := range strings.Fields(query) {
+		if sanitized := sanitizeFTSTerm(term); sanitized != "" {
+			allTokens = append(allTokens, strings.Fields(sanitized)...)
+		}
+	}
+	return allTokens
+}
+
+// buildFTSQuery builds an implicit-AND FTS5 query with prefix matching on the last token.
+func buildFTSQuery(query string) string {
+	tokens := ftsTokens(query)
+	if len(tokens) == 0 {
+		return ""
+	}
+
+	tokens[len(tokens)-1] += "*"
+	return strings.Join(tokens, " ")
+}
+
+// buildOrQuery builds an OR FTS5 query (any term matches) with prefix on the last token.
+func buildOrQuery(tokens []string) string {
+	if len(tokens) == 0 {
+		return ""
+	}
+	out := make([]string, len(tokens))
+	copy(out, tokens)
+	out[len(out)-1] += "*"
+	return strings.Join(out, " OR ")
+}
+
+// buildIntersectSQL builds a per-term INTERSECT query that finds sessions
+// containing ALL terms, even if the terms appear in different messages.
+func buildIntersectSQL(tokens []string) (string, []any) { + parts := make([]string, 0, len(tokens)) + args := make([]any, 0, len(tokens)) + for i, t := range tokens { + if i == len(tokens)-1 { + t += "*" + } + parts = append(parts, ` + SELECT DISTINCT m.session_id + FROM content_fts f + JOIN content_map m ON f.rowid = m.rowid + WHERE content_fts MATCH ?`) + args = append(args, t) + } + return strings.Join(parts, "\nINTERSECT"), args +} + +func limitClause(limit int) string { + if limit <= 0 { + return "" + } + return "\nLIMIT ?" +} + +func appendLimit(args []any, limit int) []any { + if limit <= 0 { + return args + } + return append(args, limit) +} + +func sanitizeFTSTerm(term string) string { + var tokens []string + var current strings.Builder + for _, r := range term { + if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') { + current.WriteRune(r) + } else if current.Len() > 0 { + tokens = append(tokens, current.String()) + current.Reset() + } + } + if current.Len() > 0 { + tokens = append(tokens, current.String()) + } + return strings.ToLower(strings.Join(tokens, " ")) +} diff --git a/internal/index/sync.go b/internal/index/sync.go new file mode 100644 index 0000000..4191e91 --- /dev/null +++ b/internal/index/sync.go @@ -0,0 +1,515 @@ +package index + +import ( + "database/sql" + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + "runtime" + "sync" + "time" + + "github.com/andyhtran/cct/internal/session" +) + +type fileInfo struct { + modified time.Time + size int64 +} + +type indexedFile struct { + sessionID string + modifiedAt time.Time + fileSize int64 +} + +type indexedMessage struct { + role string + source string + text string + byteOffset int64 + byteLength int +} + +type indexedSession struct { + session *session.Session + messages []indexedMessage + fileSize int64 +} + +type SyncResult struct { + Added int + Updated int + Deleted int + Unchanged int +} + +func (r *SyncResult) UpToDate() bool { + return r.Added == 0 && 
r.Updated == 0 && r.Deleted == 0 +} + +const ( + // Skip filesystem scan if synced recently. The --sync flag bypasses this. + syncCacheDuration = 5 * time.Minute + maxWorkers = 4 // Cap concurrent file parsers to limit memory + batchSize = 50 // Process sessions in batches to limit memory +) + +func (idx *Index) Sync(includeAgents bool) error { + _, err := idx.syncInternal(includeAgents, false, nil) + return err +} + +func (idx *Index) ForceSync(includeAgents bool) error { + _, err := idx.syncInternal(includeAgents, true, nil) + return err +} + +func (idx *Index) SyncWithProgress(includeAgents bool, force bool, w io.Writer) (*SyncResult, error) { + return idx.syncInternal(includeAgents, force, w) +} + +func (idx *Index) syncInternal(includeAgents bool, force bool, progress io.Writer) (*SyncResult, error) { + idx.syncMu.Lock() + defer idx.syncMu.Unlock() + + if !force && idx.recentlySynced() { + return &SyncResult{}, nil + } + + lock, err := acquireLock(idx.path + ".lock") + if err != nil { + return nil, err + } + defer lock.release() + + return idx.syncLocked(includeAgents, progress) +} + +func (idx *Index) syncLocked(includeAgents bool, progress io.Writer) (*SyncResult, error) { + current := discoverCurrentFiles(includeAgents) + indexed, err := idx.getIndexedFiles() + if err != nil { + return nil, fmt.Errorf("get indexed files: %w", err) + } + + toAdd, toUpdate, toDelete := computeChanges(current, indexed) + unchanged := len(indexed) - len(toUpdate) - len(toDelete) + if unchanged < 0 { + unchanged = 0 + } + + result := &SyncResult{ + Added: len(toAdd), + Updated: len(toUpdate), + Deleted: len(toDelete), + Unchanged: unchanged, + } + + if result.UpToDate() { + idx.updateSyncTime() + return result, nil + } + + total := len(toAdd) + len(toUpdate) + if progress != nil && total > 0 { + _, _ = fmt.Fprintf(progress, "Indexing %d session(s)...\n", total) + } + + tx, err := idx.db.Begin() + if err != nil { + return nil, fmt.Errorf("begin transaction: %w", err) + } + defer 
func() { _ = tx.Rollback() }() + + if err := idx.deleteRemovedSessions(tx, toDelete, indexed); err != nil { + return nil, err + } + + allPaths := make([]string, 0, len(toAdd)+len(toUpdate)) + allPaths = append(allPaths, toAdd...) + allPaths = append(allPaths, toUpdate...) + if err := idx.indexBatches(tx, allPaths, indexed, total, progress); err != nil { + return nil, err + } + + if err := tx.Commit(); err != nil { + return nil, err + } + + idx.updateSyncTime() + return result, nil +} + +func discoverCurrentFiles(includeAgents bool) map[string]fileInfo { + files := session.DiscoverFiles("", includeAgents) + current := make(map[string]fileInfo, len(files)) + for _, path := range files { + info, err := os.Stat(path) + if err != nil { + continue + } + current[path] = fileInfo{ + modified: info.ModTime().Truncate(time.Second), + size: info.Size(), + } + } + return current +} + +func computeChanges(current map[string]fileInfo, indexed map[string]indexedFile) (toAdd, toUpdate, toDelete []string) { + for path, info := range current { + if existing, ok := indexed[path]; !ok { + toAdd = append(toAdd, path) + } else if info.modified.After(existing.modifiedAt) || info.size != existing.fileSize { + toUpdate = append(toUpdate, path) + } + } + for path := range indexed { + if _, ok := current[path]; !ok { + toDelete = append(toDelete, path) + } + } + return +} + +func (idx *Index) deleteRemovedSessions(tx *sql.Tx, toDelete []string, indexed map[string]indexedFile) error { + for _, path := range toDelete { + sessionID := indexed[path].sessionID + if err := idx.deleteSessionData(tx, sessionID); err != nil { + return fmt.Errorf("delete session %s: %w", sessionID, err) + } + } + return nil +} + +func (idx *Index) indexBatches(tx *sql.Tx, allPaths []string, indexed map[string]indexedFile, total int, progress io.Writer) error { + var processed int64 + for i := 0; i < len(allPaths); i += batchSize { + end := min(i+batchSize, len(allPaths)) + batch := allPaths[i:end] + + results := 
parallelIndex(batch) + + for _, r := range results { + if r.err != nil { + continue + } + if _, ok := indexed[r.session.session.FilePath]; ok { + if err := idx.deleteSessionData(tx, r.session.session.ID); err != nil { + return fmt.Errorf("delete for update %s: %w", r.session.session.ID, err) + } + } + if err := idx.insertSession(tx, r.session); err != nil { + return fmt.Errorf("insert session %s: %w", r.session.session.ID, err) + } + processed++ + if progress != nil && (processed%25 == 0 || int(processed) == total) { + _, _ = fmt.Fprintf(progress, "\r %d/%d sessions indexed", processed, total) + } + } + } + if progress != nil && total > 0 { + _, _ = fmt.Fprintln(progress) + } + return nil +} + +func (idx *Index) RebuildWithProgress(includeAgents bool, progress io.Writer) (*SyncResult, error) { + idx.syncMu.Lock() + defer idx.syncMu.Unlock() + + lock, err := acquireLock(idx.path + ".lock") + if err != nil { + return nil, err + } + defer lock.release() + + if progress != nil { + _, _ = fmt.Fprintln(progress, "Dropping old index...") + } + if _, err := idx.db.Exec("DROP TABLE IF EXISTS content_fts"); err != nil { + return nil, err + } + if _, err := idx.db.Exec("DROP TABLE IF EXISTS content_map"); err != nil { + return nil, err + } + if _, err := idx.db.Exec(` + CREATE TABLE content_map ( + rowid INTEGER PRIMARY KEY, + session_id TEXT NOT NULL, + role TEXT NOT NULL, + source TEXT, + byte_offset INTEGER NOT NULL, + byte_length INTEGER NOT NULL + ) + `); err != nil { + return nil, err + } + if _, err := idx.db.Exec("CREATE INDEX idx_content_map_session ON content_map(session_id)"); err != nil { + return nil, err + } + if _, err := idx.db.Exec("DELETE FROM sessions"); err != nil { + return nil, err + } + if _, err := idx.db.Exec(` + CREATE VIRTUAL TABLE content_fts USING fts5( + text, + content='', + contentless_delete=1, + tokenize='porter unicode61' + ) + `); err != nil { + return nil, err + } + if _, err := idx.db.Exec("DELETE FROM index_meta WHERE key = 
'last_sync_time'"); err != nil { + return nil, err + } + if _, err := idx.db.Exec("VACUUM"); err != nil { + return nil, err + } + idx.lastSyncTime = time.Time{} + return idx.syncLocked(includeAgents, progress) +} + +func (idx *Index) getIndexedFiles() (map[string]indexedFile, error) { + rows, err := idx.db.Query("SELECT id, file_path, modified_at, file_size FROM sessions") + if err != nil { + return nil, err + } + defer func() { _ = rows.Close() }() + + result := make(map[string]indexedFile) + for rows.Next() { + var id, path, modifiedStr string + var size int64 + if err := rows.Scan(&id, &path, &modifiedStr, &size); err != nil { + return nil, err + } + modified, _ := time.Parse(time.RFC3339, modifiedStr) + result[path] = indexedFile{ + sessionID: id, + modifiedAt: modified, + fileSize: size, + } + } + return result, rows.Err() +} + +func (idx *Index) insertSession(tx *sql.Tx, s *indexedSession) error { + sess := s.session + projectDir := filepath.Base(filepath.Dir(sess.FilePath)) + + var createdAt string + if !sess.Created.IsZero() { + createdAt = sess.Created.Format(time.RFC3339) + } + + _, err := tx.Exec(` + INSERT OR REPLACE INTO sessions (id, file_path, project_dir, project_name, project_path, is_agent, modified_at, file_size, + first_prompt, created_at, git_branch, message_count) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `, sess.ID, sess.FilePath, projectDir, sess.ProjectName, sess.ProjectPath, boolToInt(sess.IsAgent), + sess.Modified.Format(time.RFC3339), s.fileSize, + sess.FirstPrompt, createdAt, sess.GitBranch, sess.MessageCount) + if err != nil { + return err + } + + for _, m := range s.messages { + res, err := tx.Exec(` + INSERT INTO content_map (session_id, role, source, byte_offset, byte_length) + VALUES (?, ?, ?, ?, ?) 
+ `, sess.ID, m.role, m.source, m.byteOffset, m.byteLength) + if err != nil { + return err + } + + rowID, err := res.LastInsertId() + if err != nil { + return err + } + + _, err = tx.Exec(` + INSERT INTO content_fts (rowid, text) + VALUES (?, ?) + `, rowID, m.text) + if err != nil { + return err + } + } + + return nil +} + +func boolToInt(b bool) int { + if b { + return 1 + } + return 0 +} + +type indexResult struct { + session *indexedSession + err error +} + +func parallelIndex(files []string) []indexResult { + return parallelIndexWithProgress(files, nil) +} + +func parallelIndexWithProgress(files []string, progress io.Writer) []indexResult { + if len(files) == 0 { + return nil + } + + numWorkers := runtime.NumCPU() + if numWorkers > maxWorkers { + numWorkers = maxWorkers + } + if numWorkers > len(files) { + numWorkers = len(files) + } + + jobs := make(chan string, len(files)) + results := make(chan indexResult, len(files)) + var wg sync.WaitGroup + + for i := 0; i < numWorkers; i++ { + wg.Add(1) + go func() { + defer wg.Done() + for path := range jobs { + s, err := indexSession(path) + results <- indexResult{session: s, err: err} + } + }() + } + + for _, f := range files { + jobs <- f + } + close(jobs) + + go func() { + wg.Wait() + close(results) + }() + + total := len(files) + var count int + out := make([]indexResult, 0, len(files)) + for r := range results { + out = append(out, r) + if progress != nil { + count++ + if count%25 == 0 || count == total { + _, _ = fmt.Fprintf(progress, "\r %d/%d sessions indexed", count, total) + } + } + } + if progress != nil { + _, _ = fmt.Fprintln(progress) + } + return out +} + +func indexSession(path string) (*indexedSession, error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + defer func() { _ = f.Close() }() + + info, err := f.Stat() + if err != nil { + return nil, err + } + + s := &session.Session{ + ID: session.ExtractIDFromFilename(path), + FilePath: path, + Modified: info.ModTime(), + } + 
s.ShortID = session.ShortID(s.ID) + s.IsAgent = session.IsAgentSession(s.ID) + + scanner := session.NewOffsetScanner(f) + var messages []indexedMessage + var messageCount int + + for scanner.Scan() { + line := scanner.Bytes() + lineType := session.FastExtractType(line) + + if lineType != "user" && lineType != "assistant" { + continue + } + + messageCount++ + byteOffset := scanner.Offset() + byteLength := scanner.Length() + + var obj map[string]any + if json.Unmarshal(line, &obj) != nil { + continue + } + + if lineType == "user" { + session.ExtractUserMetadata(s, obj) + } + + blocks := session.ExtractPromptBlocks(obj) + for _, block := range blocks { + if block.Text == "" { + continue + } + messages = append(messages, indexedMessage{ + role: lineType, + source: block.Source, + text: block.Text, + byteOffset: byteOffset, + byteLength: byteLength, + }) + } + } + + s.MessageCount = messageCount + + return &indexedSession{ + session: s, + messages: messages, + fileSize: info.Size(), + }, nil +} + +func (idx *Index) recentlySynced() bool { + if !idx.lastSyncTime.IsZero() && time.Since(idx.lastSyncTime) < syncCacheDuration { + return true + } + + var lastSync string + err := idx.db.QueryRow("SELECT value FROM index_meta WHERE key = 'last_sync_time'").Scan(&lastSync) + if err != nil { + return false + } + + t, err := time.Parse(time.RFC3339Nano, lastSync) + if err != nil { + return false + } + + idx.lastSyncTime = t + return time.Since(t) < syncCacheDuration +} + +func (idx *Index) updateSyncTime() { + now := time.Now() + idx.lastSyncTime = now + _, _ = idx.db.Exec( + "INSERT OR REPLACE INTO index_meta (key, value) VALUES ('last_sync_time', ?)", + now.Format(time.RFC3339Nano), + ) +} diff --git a/internal/paths/paths.go b/internal/paths/paths.go index 39b1183..7d07379 100644 --- a/internal/paths/paths.go +++ b/internal/paths/paths.go @@ -13,3 +13,14 @@ func ClaudeDir() string { func ProjectsDir() string { return filepath.Join(ClaudeDir(), "projects") } + +func CacheDir() 
string { + if xdg := os.Getenv("XDG_CACHE_HOME"); xdg != "" { + return filepath.Join(xdg, "cct") + } + return filepath.Join(os.Getenv("HOME"), ".cache", "cct") +} + +func IndexPath() string { + return filepath.Join(CacheDir(), "index.db") +} diff --git a/internal/session/parse.go b/internal/session/parse.go index 4b98c54..2aa27a8 100644 --- a/internal/session/parse.go +++ b/internal/session/parse.go @@ -241,9 +241,9 @@ func ParseTimestamp(obj map[string]any) time.Time { return t } -// extractUserMetadata populates session fields from a parsed user message. +// ExtractUserMetadata populates session fields from a parsed user message. // Returns true when all essential metadata (ProjectPath and FirstPrompt) is populated. -func extractUserMetadata(s *Session, obj map[string]any) bool { +func ExtractUserMetadata(s *Session, obj map[string]any) bool { if s.ProjectPath == "" { s.ProjectPath, _ = obj["cwd"].(string) s.ProjectName = filepath.Base(s.ProjectPath) @@ -261,9 +261,91 @@ func extractUserMetadata(s *Session, obj map[string]any) bool { func ExtractMetadata(path string) *Session { return parseSession(path, false) } func ParseFullSession(path string) *Session { return parseSession(path, true) } -// parseSession is the shared implementation for metadata extraction and full parsing. -// When full is true, it counts all messages and reads the entire file. -// When full is false, it returns early once project path and first prompt are found. +// OffsetScanner wraps a reader to track byte offsets for each line. +type OffsetScanner struct { + reader *bufio.Reader + offset int64 + lineLen int + line []byte + err error +} + +// NewOffsetScanner creates a scanner that tracks byte positions. +func NewOffsetScanner(r io.Reader) *OffsetScanner { + return &OffsetScanner{ + reader: bufio.NewReaderSize(r, scanInitBuf), + } +} + +// Scan advances to the next line, returning true if a line was read. 
+func (s *OffsetScanner) Scan() bool { + s.offset += int64(s.lineLen) + s.line, s.err = s.reader.ReadBytes('\n') + s.lineLen = len(s.line) + if s.err != nil && len(s.line) == 0 { + return false + } + return true +} + +// Bytes returns the current line (without trailing newline). +func (s *OffsetScanner) Bytes() []byte { + if len(s.line) > 0 && s.line[len(s.line)-1] == '\n' { + return s.line[:len(s.line)-1] + } + return s.line +} + +// Offset returns the byte offset of the current line in the file. +func (s *OffsetScanner) Offset() int64 { + return s.offset +} + +// Length returns the byte length of the current line (including newline). +func (s *OffsetScanner) Length() int { + return s.lineLen +} + +// ReadMessageAtOffset reads a single JSONL line at the given byte offset. +func ReadMessageAtOffset(filePath string, offset int64, length int) (role, source, text string, err error) { + f, err := os.Open(filePath) + if err != nil { + return "", "", "", err + } + defer func() { _ = f.Close() }() + + if _, err := f.Seek(offset, io.SeekStart); err != nil { + return "", "", "", err + } + + buf := make([]byte, length) + n, err := io.ReadFull(f, buf) + if err != nil && n == 0 { + return "", "", "", err + } + buf = buf[:n] + + var obj map[string]any + if err := json.Unmarshal(buf, &obj); err != nil { + return "", "", "", err + } + + role = FastExtractType(buf) + blocks := ExtractPromptBlocks(obj) + if len(blocks) > 0 { + source = blocks[0].Source + var texts []string + for _, b := range blocks { + if b.Text != "" { + texts = append(texts, b.Text) + } + } + text = strings.Join(texts, " ") + } + + return role, source, text, nil +} + func parseSession(path string, full bool) *Session { f, err := os.Open(path) if err != nil { @@ -299,7 +381,7 @@ func parseSession(path string, full bool) *Session { if json.Unmarshal(line, &obj) != nil { continue } - complete := extractUserMetadata(s, obj) + complete := ExtractUserMetadata(s, obj) if !full && complete { return s } diff --git 
a/internal/session/parse_test.go b/internal/session/parse_test.go index c3c0660..df64720 100644 --- a/internal/session/parse_test.go +++ b/internal/session/parse_test.go @@ -282,7 +282,7 @@ func TestExtractUserMetadata(t *testing.T) { "message": map[string]any{"role": "user", "content": "implement auth"}, } - complete := extractUserMetadata(s, obj) + complete := ExtractUserMetadata(s, obj) if !complete { t.Error("expected complete=true when ProjectPath and FirstPrompt are set") } @@ -322,7 +322,7 @@ func TestExtractUserMetadata(t *testing.T) { "message": map[string]any{"role": "user", "content": "new prompt"}, } - complete := extractUserMetadata(s, obj) + complete := ExtractUserMetadata(s, obj) if !complete { t.Error("expected complete=true") } @@ -351,7 +351,7 @@ func TestExtractUserMetadata(t *testing.T) { "message": map[string]any{"role": "user", "content": "sub-agent task"}, } - extractUserMetadata(s, obj) + ExtractUserMetadata(s, obj) if s.ID != "agent-19b8cb-fake-uuid" { t.Errorf("ID = %q, want %q (sub-agent ID must not be replaced by parent sessionId)", s.ID, "agent-19b8cb-fake-uuid") } @@ -367,7 +367,7 @@ func TestExtractUserMetadata(t *testing.T) { "message": map[string]any{"role": "user", "content": []any{}}, } - complete := extractUserMetadata(s, obj) + complete := ExtractUserMetadata(s, obj) if complete { t.Error("expected complete=false when FirstPrompt is empty") } @@ -398,3 +398,53 @@ func TestParseTimestamp(t *testing.T) { } }) } + +func TestOffsetScanner(t *testing.T) { + lines := "line one\nline two\nline three\n" + scanner := NewOffsetScanner(strings.NewReader(lines)) + + type expected struct { + text string + offset int64 + length int + } + + want := []expected{ + {"line one", 0, 9}, // "line one\n" = 9 bytes + {"line two", 9, 9}, // "line two\n" = 9 bytes + {"line three", 18, 11}, // "line three\n" = 11 bytes + } + + for i, w := range want { + if !scanner.Scan() { + t.Fatalf("line %d: Scan() returned false early", i) + } + if got := 
string(scanner.Bytes()); got != w.text { + t.Errorf("line %d: Bytes() = %q, want %q", i, got, w.text) + } + if got := scanner.Offset(); got != w.offset { + t.Errorf("line %d: Offset() = %d, want %d", i, got, w.offset) + } + if got := scanner.Length(); got != w.length { + t.Errorf("line %d: Length() = %d, want %d", i, got, w.length) + } + } + + if scanner.Scan() { + t.Error("expected Scan() to return false after last line") + } +} + +func TestOffsetScanner_NoTrailingNewline(t *testing.T) { + scanner := NewOffsetScanner(strings.NewReader("only line")) + + if !scanner.Scan() { + t.Fatal("expected Scan() to return true") + } + if got := string(scanner.Bytes()); got != "only line" { + t.Errorf("Bytes() = %q, want %q", got, "only line") + } + if got := scanner.Offset(); got != 0 { + t.Errorf("Offset() = %d, want 0", got) + } +} diff --git a/internal/session/scan.go b/internal/session/scan.go index 15c959f..659a71b 100644 --- a/internal/session/scan.go +++ b/internal/session/scan.go @@ -155,7 +155,7 @@ func searchOneFile(path, keyLower string, snippetWidth int, maxMatches int) *Sea } if lineType == "user" { - extractUserMetadata(s, obj) + ExtractUserMetadata(s, obj) } if maxMatches > 0 && len(matches) >= maxMatches {