;
+type SyntaxNode = Tree['topNode'];
+export type { Tree, SyntaxNode };
+export * from "./parser";
+export * from "./parser.terms";
\ No newline at end of file
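Note (not part of the diff): the exports above are typically consumed by parsing a query string with the generated `parser` and walking the resulting Lezer tree. A minimal sketch, assuming the package is imported under its workspace name `@sourcebot/query-language` (as declared later in `packages/web/package.json`); the query string is just an example:

```ts
// Illustrative only: parse a query and print every node in the syntax tree.
import { parser } from "@sourcebot/query-language";

const tree = parser.parse('lang:typescript -file:*.test.ts "foo bar"');

const cursor = tree.cursor();
do {
    // Prints node names such as Program, AndExpr, PrefixExpr, LangExpr, Term, ...
    console.log(`${cursor.name} [${cursor.from}, ${cursor.to}]`);
} while (cursor.next());
```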
diff --git a/packages/queryLanguage/src/parser.terms.ts b/packages/queryLanguage/src/parser.terms.ts
new file mode 100644
index 000000000..1682bb043
--- /dev/null
+++ b/packages/queryLanguage/src/parser.terms.ts
@@ -0,0 +1,21 @@
+// This file was generated by lezer-generator. You probably shouldn't edit it.
+export const
+ negate = 22,
+ Program = 1,
+ OrExpr = 2,
+ AndExpr = 3,
+ NegateExpr = 4,
+ PrefixExpr = 5,
+ ArchivedExpr = 6,
+ RevisionExpr = 7,
+ ContentExpr = 8,
+ ContextExpr = 9,
+ FileExpr = 10,
+ ForkExpr = 11,
+ VisibilityExpr = 12,
+ RepoExpr = 13,
+ LangExpr = 14,
+ SymExpr = 15,
+ RepoSetExpr = 16,
+ ParenExpr = 17,
+ Term = 18
diff --git a/packages/queryLanguage/src/parser.ts b/packages/queryLanguage/src/parser.ts
new file mode 100644
index 000000000..fb867c4f9
--- /dev/null
+++ b/packages/queryLanguage/src/parser.ts
@@ -0,0 +1,18 @@
+// This file was generated by lezer-generator. You probably shouldn't edit it.
+import {LRParser} from "@lezer/lr"
+import {negateToken} from "./tokens"
+export const parser = LRParser.deserialize({
+ version: 14,
+ states: "'[OVQROOO!WQQO'#CcO!WQQO'#CdO!WQQO'#CeO!WQQO'#CfO!`QSO'#CgO!kQSO'#ChO!WQQO'#CiO!WQQO'#CjO!WQQO'#CkO!WQQO'#ClOOQP'#Ca'#CaOVQRO'#CmO!vQQO'#C`OOQP'#Cn'#CnOOQP'#Cw'#CwO#nQRO'#CvO#{QQO'#CvO$WQQO'#C^OOQO'#Cu'#CuQOQQOOO!`QSO'#CbOOQP'#C}'#C}OOQP,58},58}OOQP,59O,59OOOQP,59P,59POOQP,59Q,59QOOQP'#DU'#DUOOQP,59R,59ROOQP'#DW'#DWOOQP,59S,59SOOQP,59T,59TOOQP,59U,59UOOQP,59V,59VOOQP,59W,59WO$]QQO,59XOOQP,58z,58zOOQP'#Co'#CoO$bQRO,58yOVQRO'#CpO$oQQO,58xOOQP,58|,58|OOQP1G.s1G.sOOQP-E6m-E6mO$zQRO'#CvOOQO'#Cv'#CvOOQO,59[,59[OOQO-E6n-E6n",
+ stateData: "%i~OhOS~Of]OleOpPOr^Os^OtQOuROvSOwTOyUO!OVO!PWO!QXO!RYO!S[O~OrfOsfO~OmkOnkOokO~O{mO|mO}mO~OleOpPOtQOuROvSOwTOyUO!OVO!PWO!QXO!RYO!S[O~OeiX!UjX!TiX~PVOeiX!UjX!TiX~O!UwO~O!TzO~OeRa!URa!TRa~PVO!UwOeQa!TQa~OejX!UjX!TjX~PVOrlptuvwy!O!P!Q!R!Usy~",
+ goto: "$`{PP|!Q!X!a!l!l!l!l!l!l!l!l!l!l!l!a!X!u!|PPPP#S#Y#aPPPPP#mPPPPPP$VP$]TcO[SaO[R}w]_O[`vw|[_O[`vw|Rt]_ZO[]`vw|Sv`|R{vQxbR!PxQdORs[SbO[R!OwS`O[Uu`v|R|wQgPQhQQiRQjSQoVQpWQqXRrYQlTRyeRnU",
+ nodeNames: "⚠ Program OrExpr AndExpr NegateExpr PrefixExpr ArchivedExpr RevisionExpr ContentExpr ContextExpr FileExpr ForkExpr VisibilityExpr RepoExpr LangExpr SymExpr RepoSetExpr ParenExpr Term",
+ maxTerm: 52,
+ skippedNodes: [0],
+ repeatNodeCount: 2,
+ tokenData: "!Hc~RpOX#VXY$TYZ$TZp#Vpq$Tqr#Vrs$csx#Vxy&Vyz&[z#T#V#T#U&a#U#V#V#V#W0q#W#Y#V#Y#Z:{#Z#`#V#`#aCZ#a#b#V#b#cGu#c#dIm#d#e!!V#e#f#V#f#g!-k#g#h!7q#h#j#V#j#k!;_#k#m#V#m#n!Em#n;'S#V;'S;=`#}<%lO#VP#[ZsPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!];'S#V;'S;=`#}<%lO#VP$QP;=`<%l#V~$YRh~XY$TYZ$Tpq$T~$fWOY$cZr$crs%Os#O$c#O#P%T#P;'S$c;'S;=`&P<%lO$c~%TOr~~%WRO;'S$c;'S;=`%a;=`O$c~%dXOY$cZr$crs%Os#O$c#O#P%T#P;'S$c;'S;=`&P;=`<%l$c<%lO$c~&SP;=`<%l$c~&[O!S~~&aO!T~~&f_sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#b#V#b#c'e#c#f#V#f#g)]#g;'S#V;'S;=`#}<%lO#VR'j]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#m#V#m#n(c#n;'S#V;'S;=`#}<%lO#VR(jZ}QsPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!];'S#V;'S;=`#}<%lO#V~)b]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#V#V#V#W*Z#W;'S#V;'S;=`#}<%lO#V~*`]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#[#V#[#]+X#];'S#V;'S;=`#}<%lO#V~+^]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#]#V#]#^,V#^;'S#V;'S;=`#}<%lO#V~,[]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#j#V#j#k-T#k;'S#V;'S;=`#}<%lO#V~-Y]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#X#V#X#Y.R#Y;'S#V;'S;=`#}<%lO#V~.W]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#W#V#W#X/P#X;'S#V;'S;=`#}<%lO#V~/UZsPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]/w!];'S#V;'S;=`#}<%lO#V~0OZl~sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!];'S#V;'S;=`#}<%lO#V~0v]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]1o!]#c#V#c#d2i#d;'S#V;'S;=`#}<%lO#V~1vZt~sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!];'S#V;'S;=`#}<%lO#V~2n]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#b#V#b#c3g#c;'S#V;'S;=`#}<%lO#V~3l]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#h#V#h#i4e#i;'S#V;'S;=`#}<%lO#V~4j]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#X#V#X#Y5c#Y;'S#V;'S;=`#}<%lO#V~5h_sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#b#V#b#c6g#c#l#V#l#m8]#m;'S#V;'S;=`#}<%lO#V~6l]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#h#V#h#i7e#i;'S#V;'S;=`#}<%lO#V~7jZsPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]1o!];'S#V;'S;=`#}<%lO#V~8b]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#h#V#h#i9Z#i;'S#V;'S;=`#}<%lO#V~9`ZsPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]:R!];'S#V;'S;=`#}<%lO#V~:YZu~sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!];'S#V;'S;=`#}<%lO#V~;Q_sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]u#Y;'S#V;'S;=`#}<%lO#V~>zZsPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]
X#^;'S#V;'S;=`#}<%lO#V~!>^]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#U#V#U#V!?V#V;'S#V;'S;=`#}<%lO#V~!?[]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#]#V#]#^!@T#^;'S#V;'S;=`#}<%lO#V~!@Y]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#`#V#`#a!AR#a;'S#V;'S;=`#}<%lO#V~!AW]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#]#V#]#^!BP#^;'S#V;'S;=`#}<%lO#V~!BU]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#h#V#h#i!B}#i;'S#V;'S;=`#}<%lO#V~!CS]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#m#V#m#n!C{#n;'S#V;'S;=`#}<%lO#V~!DQZsPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]!Ds!];'S#V;'S;=`#}<%lO#V~!DzZy~sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!];'S#V;'S;=`#}<%lO#VR!Er]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#X#V#X#Y!Fk#Y;'S#V;'S;=`#}<%lO#VR!Fp]sPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!]#g#V#g#h!Gi#h;'S#V;'S;=`#}<%lO#VR!GpZmQsPOX#VZp#Vqr#Vsx#Vz}#V}!O#V!O![#V![!]#V!];'S#V;'S;=`#}<%lO#V",
+ tokenizers: [negateToken, 0, 1],
+ topRules: {"Program":[0,1]},
+ tokenPrec: 193,
+ termNames: {"0":"⚠","1":"@top","2":"OrExpr","3":"AndExpr","4":"NegateExpr","5":"PrefixExpr","6":"ArchivedExpr","7":"RevisionExpr","8":"ContentExpr","9":"ContextExpr","10":"FileExpr","11":"ForkExpr","12":"VisibilityExpr","13":"RepoExpr","14":"LangExpr","15":"SymExpr","16":"RepoSetExpr","17":"ParenExpr","18":"Term","19":"expr+","20":"(or andExpr)+","21":"␄","22":"negate","23":"%mainskip","24":"space","25":"query","26":"andExpr","27":"expr","28":"archivedKw","29":"\"yes\"","30":"\"no\"","31":"\"only\"","32":"revisionKw","33":"value","34":"quotedString","35":"word","36":"contentKw","37":"contextKw","38":"fileKw","39":"forkKw","40":"forkValue","41":"visibilityKw","42":"visibilityValue","43":"\"public\"","44":"\"private\"","45":"\"any\"","46":"repoKw","47":"langKw","48":"symKw","49":"reposetKw","50":"\"(\"","51":"\")\"","52":"or"}
+})
diff --git a/packages/queryLanguage/src/query.grammar b/packages/queryLanguage/src/query.grammar
new file mode 100644
index 000000000..66c0ee83f
--- /dev/null
+++ b/packages/queryLanguage/src/query.grammar
@@ -0,0 +1,102 @@
+@external tokens negateToken from "./tokens" { negate }
+
+@top Program { query }
+
+@precedence {
+ negate,
+ and,
+ or @left
+}
+
+query {
+ OrExpr |
+ AndExpr |
+ expr
+}
+
+OrExpr { andExpr (or andExpr)+ }
+
+AndExpr { expr expr+ }
+
+andExpr { AndExpr | expr }
+
+expr {
+ NegateExpr |
+ ParenExpr |
+ PrefixExpr |
+ Term
+}
+
+NegateExpr { !negate negate (PrefixExpr | ParenExpr) }
+
+ParenExpr { "(" query ")" }
+
+PrefixExpr {
+ ArchivedExpr |
+ RevisionExpr |
+ ContentExpr |
+ ContextExpr |
+ FileExpr |
+ ForkExpr |
+ VisibilityExpr |
+ RepoExpr |
+ LangExpr |
+ SymExpr |
+ RepoSetExpr
+}
+
+RevisionExpr { revisionKw value }
+ContentExpr { contentKw value }
+ContextExpr { contextKw value }
+FileExpr { fileKw value }
+RepoExpr { repoKw value }
+LangExpr { langKw value }
+SymExpr { symKw value }
+RepoSetExpr { reposetKw value }
+
+// Modifiers
+ArchivedExpr { archivedKw archivedValue }
+ForkExpr { forkKw forkValue }
+VisibilityExpr { visibilityKw visibilityValue }
+
+archivedValue { "yes" | "no" | "only" }
+forkValue { "yes" | "no" | "only" }
+visibilityValue { "public" | "private" | "any" }
+
+Term { quotedString | word }
+
+value { quotedString | word }
+
+@skip { space }
+
+@tokens {
+ archivedKw { "archived:" }
+ revisionKw { "rev:" }
+ contentKw { "content:" | "c:" }
+ contextKw { "context:" }
+ fileKw { "file:" | "f:" }
+ forkKw { "fork:" }
+ visibilityKw { "visibility:" }
+ repoKw { "repo:" | "r:" }
+ langKw { "lang:" }
+ symKw { "sym:" }
+ reposetKw { "reposet:" }
+
+ or { "or" ![a-zA-Z0-9_] }
+
+ quotedString { '"' (!["\\\n] | "\\" _)* '"' }
+
+ // Allow almost anything in a word except spaces, parens, quotes
+ // Colons and dashes are allowed anywhere in words (including at the start)
+ word { (![ \t\n()"]) (![ \t\n()":] | ":" | "-")* }
+
+ space { $[ \t\n]+ }
+
+ @precedence {
+ quotedString,
+ archivedKw, revisionKw, contentKw, contextKw, fileKw,
+ forkKw, visibilityKw, repoKw, langKw,
+ symKw, reposetKw, or,
+ word
+ }
+}
\ No newline at end of file
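Note (not part of the diff): a parser generated from this grammar would typically be wrapped in a CodeMirror language to drive search-bar highlighting and structure. The wiring below is an assumed sketch; `queryLanguage` and `querySupport` are placeholder names that do not appear in this change.

```ts
// Hypothetical sketch: expose the generated parser as a CodeMirror language.
import { LRLanguage, LanguageSupport } from "@codemirror/language";
import { parser } from "@sourcebot/query-language";

export const queryLanguage = LRLanguage.define({
    parser,
    languageData: {},
});

// Convenience factory returning an extension for an editor configuration.
export const querySupport = () => new LanguageSupport(queryLanguage);
```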
diff --git a/packages/queryLanguage/src/tokens.ts b/packages/queryLanguage/src/tokens.ts
new file mode 100644
index 000000000..15a02525c
--- /dev/null
+++ b/packages/queryLanguage/src/tokens.ts
@@ -0,0 +1,59 @@
+import { ExternalTokenizer } from "@lezer/lr";
+import { negate } from "./parser.terms";
+
+// External tokenizer for negation
+// Only tokenizes `-` as negate when followed by a prefix keyword or `(`
+export const negateToken = new ExternalTokenizer((input) => {
+ if (input.next !== 45 /* '-' */) return; // Not a dash
+
+ const startPos = input.pos;
+
+ // Look ahead to see what follows the dash
+ input.advance();
+
+ // Skip whitespace
+ let ch = input.next;
+ while (ch === 32 || ch === 9 || ch === 10) {
+ input.advance();
+ ch = input.next;
+ }
+
+ // Check if followed by opening paren
+ if (ch === 40 /* '(' */) {
+ input.acceptToken(negate, -input.pos + startPos + 1); // Accept just the dash
+ return;
+ }
+
+ // Check if followed by a prefix keyword, i.e. a run of non-delimiter
+ // characters terminated by a colon (e.g. `file:` or `repo:`)
+ const checkPos = input.pos;
+ let foundColon = false;
+
+ // Look ahead until we hit a delimiter or colon
+ while (ch >= 0) {
+ if (ch === 58 /* ':' */) {
+ foundColon = true;
+ break;
+ }
+ // Hit a delimiter (whitespace, paren, or quote) - not a prefix keyword
+ if (ch === 32 || ch === 9 || ch === 10 || ch === 40 || ch === 41 || ch === 34) {
+ break;
+ }
+ input.advance();
+ ch = input.next;
+ }
+
+ // Reset position
+ while (input.pos > checkPos) {
+ input.advance(-1);
+ }
+
+ if (foundColon) {
+ // It's a prefix keyword, accept as negate
+ input.acceptToken(negate, -input.pos + startPos + 1);
+ return;
+ }
+
+ // Otherwise, don't tokenize as negate (let word handle it)
+});
+
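Note (not part of the diff): the effect of `negateToken` is that a leading `-` only becomes a `negate` token when a prefix filter or `(` follows; otherwise the dash stays part of the word. A small sketch, with expected node names taken from `test/negation.txt`:

```ts
// Illustrative only: observe how the external tokenizer classifies a leading dash.
import { parser } from "@sourcebot/query-language";

const topChild = (query: string) =>
    parser.parse(query).topNode.firstChild?.name;

console.log(topChild("-file:test.js")); // "NegateExpr": the dash negates the file: filter
console.log(topChild("-test"));         // "Term": the dash is part of the search term
```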
diff --git a/packages/queryLanguage/test/basic.txt b/packages/queryLanguage/test/basic.txt
new file mode 100644
index 000000000..de8bb93bd
--- /dev/null
+++ b/packages/queryLanguage/test/basic.txt
@@ -0,0 +1,72 @@
+# Single term
+
+hello
+
+==>
+
+Program(Term)
+
+# Multiple terms
+
+hello world
+
+==>
+
+Program(AndExpr(Term,Term))
+
+# Multiple terms with various characters
+
+console.log error_handler
+
+==>
+
+Program(AndExpr(Term,Term))
+
+# Term with underscores
+
+my_variable_name
+
+==>
+
+Program(Term)
+
+# Term with dots
+
+com.example.package
+
+==>
+
+Program(Term)
+
+# Term with numbers
+
+func123 test_456
+
+==>
+
+Program(AndExpr(Term,Term))
+
+# Regex pattern
+
+[a-z]+
+
+==>
+
+Program(Term)
+
+# Wildcard pattern
+
+test.*
+
+==>
+
+Program(Term)
+
+# Multiple regex patterns
+
+\w+ [0-9]+ \s*
+
+==>
+
+Program(AndExpr(Term,Term,Term))
+
diff --git a/packages/queryLanguage/test/grammar.test.ts b/packages/queryLanguage/test/grammar.test.ts
new file mode 100644
index 000000000..a02862856
--- /dev/null
+++ b/packages/queryLanguage/test/grammar.test.ts
@@ -0,0 +1,21 @@
+import { parser } from "../src/parser";
+import { fileTests } from "@lezer/generator/dist/test";
+import { describe, it } from "vitest";
+import { fileURLToPath } from "url"
+import * as fs from "fs";
+import * as path from "path";
+
+const caseDir = path.dirname(fileURLToPath(import.meta.url))
+
+for (const file of fs.readdirSync(caseDir)) {
+ if (!/\.txt$/.test(file)) {
+ continue;
+ }
+
+ let name = /^[^\.]*/.exec(file)?.[0];
+ describe(name ?? "unknown", () => {
+ for (const { name, run } of fileTests(fs.readFileSync(path.join(caseDir, file), "utf8"), file)) {
+ it(name, () => run(parser));
+ }
+ });
+}
\ No newline at end of file
diff --git a/packages/queryLanguage/test/grouping.txt b/packages/queryLanguage/test/grouping.txt
new file mode 100644
index 000000000..e8c7798eb
--- /dev/null
+++ b/packages/queryLanguage/test/grouping.txt
@@ -0,0 +1,120 @@
+# Empty parentheses
+
+()
+
+==>
+
+Program(ParenExpr(Term(⚠)))
+
+# Simple grouping
+
+(test)
+
+==>
+
+Program(ParenExpr(Term))
+
+# Multiple terms in group
+
+(hello world)
+
+==>
+
+Program(ParenExpr(AndExpr(Term,Term)))
+
+# Nested parentheses
+
+((test))
+
+==>
+
+Program(ParenExpr(ParenExpr(Term)))
+
+# Multiple groups
+
+(first) (second)
+
+==>
+
+Program(AndExpr(ParenExpr(Term),ParenExpr(Term)))
+
+# Group with multiple terms
+
+(one two three)
+
+==>
+
+Program(ParenExpr(AndExpr(Term,Term,Term)))
+
+# Mixed grouped and ungrouped
+
+test (grouped) another
+
+==>
+
+Program(AndExpr(Term,ParenExpr(Term),Term))
+
+# Deeply nested
+
+(((nested)))
+
+==>
+
+Program(ParenExpr(ParenExpr(ParenExpr(Term))))
+
+# Multiple nested groups
+
+((a b) (c d))
+
+==>
+
+Program(ParenExpr(AndExpr(ParenExpr(AndExpr(Term,Term)),ParenExpr(AndExpr(Term,Term)))))
+
+# Group at start
+
+(start) middle end
+
+==>
+
+Program(AndExpr(ParenExpr(Term),Term,Term))
+
+# Group at end
+
+start middle (end)
+
+==>
+
+Program(AndExpr(Term,Term,ParenExpr(Term)))
+
+# Complex grouping pattern
+
+(a (b c) d)
+
+==>
+
+Program(ParenExpr(AndExpr(Term,ParenExpr(AndExpr(Term,Term)),Term)))
+
+# Sequential groups
+
+(a)(b)(c)
+
+==>
+
+Program(AndExpr(ParenExpr(Term),ParenExpr(Term),ParenExpr(Term)))
+
+# Group with regex
+
+([a-z]+)
+
+==>
+
+Program(ParenExpr(Term))
+
+# Group with dots
+
+(com.example.test)
+
+==>
+
+Program(ParenExpr(Term))
+
diff --git a/packages/queryLanguage/test/negation.txt b/packages/queryLanguage/test/negation.txt
new file mode 100644
index 000000000..716da1157
--- /dev/null
+++ b/packages/queryLanguage/test/negation.txt
@@ -0,0 +1,255 @@
+# Literal dash term
+
+-test
+
+==>
+
+Program(Term)
+
+# Quoted dash term
+
+"-excluded"
+
+==>
+
+Program(Term)
+
+# Dash in middle
+
+test-case
+
+==>
+
+Program(Term)
+
+# Multiple dash terms
+
+-one -two -three
+
+==>
+
+Program(AndExpr(Term,Term,Term))
+
+# Negate file prefix
+
+-file:test.js
+
+==>
+
+Program(NegateExpr(PrefixExpr(FileExpr)))
+
+# Negate repo prefix
+
+-repo:archived
+
+==>
+
+Program(NegateExpr(PrefixExpr(RepoExpr)))
+
+# Negate lang prefix
+
+-lang:python
+
+==>
+
+Program(NegateExpr(PrefixExpr(LangExpr)))
+
+# Negate content prefix
+
+-content:TODO
+
+==>
+
+Program(NegateExpr(PrefixExpr(ContentExpr)))
+
+# Negate revision prefix
+
+-rev:develop
+
+==>
+
+Program(NegateExpr(PrefixExpr(RevisionExpr)))
+
+# Negate archived prefix
+
+-archived:yes
+
+==>
+
+Program(NegateExpr(PrefixExpr(ArchivedExpr)))
+
+# Negate fork prefix
+
+-fork:yes
+
+==>
+
+Program(NegateExpr(PrefixExpr(ForkExpr)))
+
+# Negate visibility prefix
+
+-visibility:any
+
+==>
+
+Program(NegateExpr(PrefixExpr(VisibilityExpr)))
+
+# Negate context prefix
+
+-context:backend
+
+==>
+
+Program(NegateExpr(PrefixExpr(ContextExpr)))
+
+# Negate symbol prefix
+
+-sym:OldClass
+
+==>
+
+Program(NegateExpr(PrefixExpr(SymExpr)))
+
+# Negate parentheses
+
+-(test)
+
+==>
+
+Program(NegateExpr(ParenExpr(Term)))
+
+# Negate group with multiple terms
+
+-(test exclude)
+
+==>
+
+Program(NegateExpr(ParenExpr(AndExpr(Term,Term))))
+
+# Negate group with prefix
+
+-(file:test.js console.log)
+
+==>
+
+Program(NegateExpr(ParenExpr(AndExpr(PrefixExpr(FileExpr),Term))))
+
+# Prefix with negated term
+
+file:test.js -console
+
+==>
+
+Program(AndExpr(PrefixExpr(FileExpr),Term))
+
+# Multiple prefixes with negation
+
+file:test.js -lang:python
+
+==>
+
+Program(AndExpr(PrefixExpr(FileExpr),NegateExpr(PrefixExpr(LangExpr))))
+
+# Complex negation pattern
+
+function -file:test.js -lang:java
+
+==>
+
+Program(AndExpr(Term,NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(LangExpr))))
+
+# Negation inside parentheses
+
+(-file:test.js)
+
+==>
+
+Program(ParenExpr(NegateExpr(PrefixExpr(FileExpr))))
+
+# Multiple negations in group
+
+(-file:a.js -lang:python)
+
+==>
+
+Program(ParenExpr(AndExpr(NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(LangExpr)))))
+
+# Mixed in parentheses
+
+(include -file:test.js)
+
+==>
+
+Program(ParenExpr(AndExpr(Term,NegateExpr(PrefixExpr(FileExpr)))))
+
+# Negate nested group
+
+-((file:test.js))
+
+==>
+
+Program(NegateExpr(ParenExpr(ParenExpr(PrefixExpr(FileExpr)))))
+
+# Negate short form prefix
+
+-f:test.js
+
+==>
+
+Program(NegateExpr(PrefixExpr(FileExpr)))
+
+# Negate short form repo
+
+-r:myrepo
+
+==>
+
+Program(NegateExpr(PrefixExpr(RepoExpr)))
+
+# Negate short form content
+
+-c:console
+
+==>
+
+Program(NegateExpr(PrefixExpr(ContentExpr)))
+
+# Negate with prefix in quotes
+
+-file:"test file.js"
+
+==>
+
+Program(NegateExpr(PrefixExpr(FileExpr)))
+
+# Complex with multiple negated prefixes
+
+lang:typescript -file:*.test.ts -file:*.spec.ts
+
+==>
+
+Program(AndExpr(PrefixExpr(LangExpr),NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(FileExpr))))
+
+# Negated group with prefix
+
+-(file:test.js lang:python)
+
+==>
+
+Program(NegateExpr(ParenExpr(AndExpr(PrefixExpr(FileExpr),PrefixExpr(LangExpr)))))
+
+# Negate empty group
+
+-()
+
+==>
+
+Program(NegateExpr(ParenExpr(Term(⚠))))
+
+# Negate with space after dash
+
+- file:test.js
+
+==>
+
+Program(NegateExpr(PrefixExpr(FileExpr)))
diff --git a/packages/queryLanguage/test/operators.txt b/packages/queryLanguage/test/operators.txt
new file mode 100644
index 000000000..0ff1f6d82
--- /dev/null
+++ b/packages/queryLanguage/test/operators.txt
@@ -0,0 +1,271 @@
+# Simple OR
+
+test or example
+
+==>
+
+Program(OrExpr(Term,Term))
+
+# Multiple OR
+
+one or two or three
+
+==>
+
+Program(OrExpr(Term,Term,Term))
+
+# OR with prefixes
+
+file:test.js or file:example.js
+
+==>
+
+Program(OrExpr(PrefixExpr(FileExpr),PrefixExpr(FileExpr)))
+
+# OR with negation
+
+test or -file:excluded.js
+
+==>
+
+Program(OrExpr(Term,NegateExpr(PrefixExpr(FileExpr))))
+
+# OR with quoted strings
+
+"first option" or "second option"
+
+==>
+
+Program(OrExpr(Term,Term))
+
+# OR with different prefixes
+
+lang:python or lang:javascript
+
+==>
+
+Program(OrExpr(PrefixExpr(LangExpr),PrefixExpr(LangExpr)))
+
+# Multiple terms with OR
+
+function test or class example
+
+==>
+
+Program(OrExpr(AndExpr(Term,Term),AndExpr(Term,Term)))
+
+# OR in parentheses
+
+(test or example)
+
+==>
+
+Program(ParenExpr(OrExpr(Term,Term)))
+
+# OR with parentheses outside
+
+(test) or (example)
+
+==>
+
+Program(OrExpr(ParenExpr(Term),ParenExpr(Term)))
+
+# Complex OR with grouping
+
+(file:*.js lang:javascript) or (file:*.ts lang:typescript)
+
+==>
+
+Program(OrExpr(ParenExpr(AndExpr(PrefixExpr(FileExpr),PrefixExpr(LangExpr))),ParenExpr(AndExpr(PrefixExpr(FileExpr),PrefixExpr(LangExpr)))))
+
+# OR with mixed content
+
+test or file:example.js
+
+==>
+
+Program(OrExpr(Term,PrefixExpr(FileExpr)))
+
+# Prefix OR term
+
+file:test.js or example
+
+==>
+
+Program(OrExpr(PrefixExpr(FileExpr),Term))
+
+# OR with short form prefixes
+
+f:test.js or r:myrepo
+
+==>
+
+Program(OrExpr(PrefixExpr(FileExpr),PrefixExpr(RepoExpr)))
+
+# OR with repo prefixes
+
+repo:project1 or repo:project2
+
+==>
+
+Program(OrExpr(PrefixExpr(RepoExpr),PrefixExpr(RepoExpr)))
+
+# OR with revision prefixes
+
+rev:main or rev:develop
+
+==>
+
+Program(OrExpr(PrefixExpr(RevisionExpr),PrefixExpr(RevisionExpr)))
+
+# OR with lang prefixes
+
+lang:rust or lang:go
+
+==>
+
+Program(OrExpr(PrefixExpr(LangExpr),PrefixExpr(LangExpr)))
+
+# OR with content
+
+content:TODO or content:FIXME
+
+==>
+
+Program(OrExpr(PrefixExpr(ContentExpr),PrefixExpr(ContentExpr)))
+
+# OR with negated terms
+
+-file:test.js or -file:spec.js
+
+==>
+
+Program(OrExpr(NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(FileExpr))))
+
+# OR in nested parentheses
+
+((a or b) or (c or d))
+
+==>
+
+Program(ParenExpr(OrExpr(ParenExpr(OrExpr(Term,Term)),ParenExpr(OrExpr(Term,Term)))))
+
+# Multiple OR with parentheses and implicit AND
+
+(a or b) and (c or d)
+
+==>
+
+Program(AndExpr(ParenExpr(OrExpr(Term,Term)),Term,ParenExpr(OrExpr(Term,Term))))
+
+# OR with wildcards
+
+*.test.js or *.spec.js
+
+==>
+
+Program(OrExpr(Term,Term))
+
+# OR with regex patterns
+
+[a-z]+ or [0-9]+
+
+==>
+
+Program(OrExpr(Term,Term))
+
+# OR with dots
+
+com.example.test or org.example.test
+
+==>
+
+Program(OrExpr(Term,Term))
+
+# OR with dashes
+
+test-one or test-two
+
+==>
+
+Program(OrExpr(Term,Term))
+
+# Word containing 'or'
+
+order
+
+==>
+
+Program(Term)
+
+# Word containing 'or' in middle
+
+before
+
+==>
+
+Program(Term)
+
+# OR at start
+
+or test
+
+==>
+
+Program(⚠,Term)
+
+# OR at end (or becomes term)
+
+test or
+
+==>
+
+Program(AndExpr(Term,Term))
+
+# Multiple consecutive OR
+
+test or or example
+
+==>
+
+Program(OrExpr(Term,⚠,Term))
+
+# OR with all prefix types
+
+file:*.js or repo:myrepo or lang:javascript
+
+==>
+
+Program(OrExpr(PrefixExpr(FileExpr),PrefixExpr(RepoExpr),PrefixExpr(LangExpr)))
+
+# Complex query with OR and negation
+
+(lang:python or lang:ruby) -file:test.py
+
+==>
+
+Program(AndExpr(ParenExpr(OrExpr(PrefixExpr(LangExpr),PrefixExpr(LangExpr))),NegateExpr(PrefixExpr(FileExpr))))
+
+# OR with quoted prefix values
+
+file:"test one.js" or file:"test two.js"
+
+==>
+
+Program(OrExpr(PrefixExpr(FileExpr),PrefixExpr(FileExpr)))
+
+# OR with empty parentheses
+
+() or ()
+
+==>
+
+Program(OrExpr(ParenExpr(Term(⚠)),ParenExpr(Term(⚠))))
+
+# OR with negated groups
+
+-(file:a.js) or -(file:b.js)
+
+==>
+
+Program(OrExpr(NegateExpr(ParenExpr(PrefixExpr(FileExpr))),NegateExpr(ParenExpr(PrefixExpr(FileExpr)))))
diff --git a/packages/queryLanguage/test/precedence.txt b/packages/queryLanguage/test/precedence.txt
new file mode 100644
index 000000000..d43e5b346
--- /dev/null
+++ b/packages/queryLanguage/test/precedence.txt
@@ -0,0 +1,200 @@
+# OR has lowest precedence - implicit AND groups first
+
+a b or c d
+
+==>
+
+Program(OrExpr(AndExpr(Term,Term),AndExpr(Term,Term)))
+
+# Multiple OR operators are left-associative
+
+a or b or c
+
+==>
+
+Program(OrExpr(Term,Term,Term))
+
+# AND before OR
+
+file:test.js error or file:test.go panic
+
+==>
+
+Program(OrExpr(AndExpr(PrefixExpr(FileExpr),Term),AndExpr(PrefixExpr(FileExpr),Term)))
+
+# Negation binds tighter than AND
+
+-file:test.js error
+
+==>
+
+Program(AndExpr(NegateExpr(PrefixExpr(FileExpr)),Term))
+
+# Negation binds tighter than OR
+
+-file:a.js or file:b.js
+
+==>
+
+Program(OrExpr(NegateExpr(PrefixExpr(FileExpr)),PrefixExpr(FileExpr)))
+
+# Parentheses override precedence
+
+(a or b) c
+
+==>
+
+Program(AndExpr(ParenExpr(OrExpr(Term,Term)),Term))
+
+# Parentheses override - OR inside parens groups first
+
+a (b or c)
+
+==>
+
+Program(AndExpr(Term,ParenExpr(OrExpr(Term,Term))))
+
+# Complex: AND, OR, and negation
+
+a -b or c d
+
+==>
+
+Program(OrExpr(AndExpr(Term,Term),AndExpr(Term,Term)))
+
+# Negated group in OR expression
+
+-(a b) or c
+
+==>
+
+Program(OrExpr(NegateExpr(ParenExpr(AndExpr(Term,Term))),Term))
+
+# Multiple negations in OR
+
+-file:a.js or -file:b.js or file:c.js
+
+==>
+
+Program(OrExpr(NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(FileExpr)),PrefixExpr(FileExpr)))
+
+# Prefix binds to its value only
+
+file:a.js b.js
+
+==>
+
+Program(AndExpr(PrefixExpr(FileExpr),Term))
+
+# OR with prefixes and terms mixed
+
+repo:backend error or repo:frontend warning
+
+==>
+
+Program(OrExpr(AndExpr(PrefixExpr(RepoExpr),Term),AndExpr(PrefixExpr(RepoExpr),Term)))
+
+# Nested parentheses with OR
+
+((a or b) c) or d
+
+==>
+
+Program(OrExpr(ParenExpr(AndExpr(ParenExpr(OrExpr(Term,Term)),Term)),Term))
+
+# OR at different nesting levels
+
+(a or (b or c))
+
+==>
+
+Program(ParenExpr(OrExpr(Term,ParenExpr(OrExpr(Term,Term)))))
+
+# Implicit AND groups all adjacent terms before OR
+
+a b c or d e f
+
+==>
+
+Program(OrExpr(AndExpr(Term,Term,Term),AndExpr(Term,Term,Term)))
+
+# Mixed prefix and regular terms with OR
+
+lang:go func or lang:rust fn
+
+==>
+
+Program(OrExpr(AndExpr(PrefixExpr(LangExpr),Term),AndExpr(PrefixExpr(LangExpr),Term)))
+
+# Negation doesn't affect OR grouping
+
+a or -b or c
+
+==>
+
+Program(OrExpr(Term,Term,Term))
+
+# Parentheses can isolate OR from surrounding AND
+
+a (b or c) d
+
+==>
+
+Program(AndExpr(Term,ParenExpr(OrExpr(Term,Term)),Term))
+
+# Multiple parenthesized groups with AND
+
+(a or b) (c or d)
+
+==>
+
+Program(AndExpr(ParenExpr(OrExpr(Term,Term)),ParenExpr(OrExpr(Term,Term))))
+
+# Quoted strings are atomic - no precedence inside
+
+"a or b"
+
+==>
+
+Program(Term)
+
+# Prefix with OR value doesn't split
+
+file:"a.js or b.js"
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# Negated prefix in complex expression
+
+-file:test.js lang:go error or warning
+
+==>
+
+Program(OrExpr(AndExpr(NegateExpr(PrefixExpr(FileExpr)),PrefixExpr(LangExpr),Term),Term))
+
+# OR followed by parenthesized AND
+
+a or (b c)
+
+==>
+
+Program(OrExpr(Term,ParenExpr(AndExpr(Term,Term))))
+
+# Empty parens don't affect precedence
+
+() or a b
+
+==>
+
+Program(OrExpr(ParenExpr(Term(⚠)),AndExpr(Term,Term)))
+
+# Negation of empty group
+
+-() a
+
+==>
+
+Program(AndExpr(NegateExpr(ParenExpr(Term(⚠))),Term))
+
diff --git a/packages/queryLanguage/test/prefixes.txt b/packages/queryLanguage/test/prefixes.txt
new file mode 100644
index 000000000..00533ec03
--- /dev/null
+++ b/packages/queryLanguage/test/prefixes.txt
@@ -0,0 +1,336 @@
+# File prefix
+
+file:README.md
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# File prefix short form
+
+f:index.ts
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# Repo prefix
+
+repo:myproject
+
+==>
+
+Program(PrefixExpr(RepoExpr))
+
+# Repo prefix short form
+
+r:github.com/user/repo
+
+==>
+
+Program(PrefixExpr(RepoExpr))
+
+# Content prefix
+
+content:function
+
+==>
+
+Program(PrefixExpr(ContentExpr))
+
+# Content prefix short form
+
+c:console.log
+
+==>
+
+Program(PrefixExpr(ContentExpr))
+
+# Revision prefix
+
+rev:main
+
+==>
+
+Program(PrefixExpr(RevisionExpr))
+
+# Lang prefix
+
+lang:typescript
+
+==>
+
+Program(PrefixExpr(LangExpr))
+
+# Archived prefix - no
+
+archived:no
+
+==>
+
+Program(PrefixExpr(ArchivedExpr))
+
+# Archived prefix - only
+
+archived:only
+
+==>
+
+Program(PrefixExpr(ArchivedExpr))
+
+# Fork prefix - yes
+
+fork:yes
+
+==>
+
+Program(PrefixExpr(ForkExpr))
+
+# Fork prefix - only
+
+fork:only
+
+==>
+
+Program(PrefixExpr(ForkExpr))
+
+# Visibility prefix - public
+
+visibility:public
+
+==>
+
+Program(PrefixExpr(VisibilityExpr))
+
+# Context prefix
+
+context:web
+
+==>
+
+Program(PrefixExpr(ContextExpr))
+
+# Symbol prefix
+
+sym:MyClass
+
+==>
+
+Program(PrefixExpr(SymExpr))
+
+# RepoSet prefix
+
+reposet:repo1,repo2
+
+==>
+
+Program(PrefixExpr(RepoSetExpr))
+
+# File with wildcard
+
+file:*.ts
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# File with path
+
+file:src/components/Button.tsx
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# Repo with full URL
+
+repo:github.com/org/project
+
+==>
+
+Program(PrefixExpr(RepoExpr))
+
+# Multiple prefixes
+
+file:test.js repo:myproject
+
+==>
+
+Program(AndExpr(PrefixExpr(FileExpr),PrefixExpr(RepoExpr)))
+
+# Prefix with term
+
+file:test.js console.log
+
+==>
+
+Program(AndExpr(PrefixExpr(FileExpr),Term))
+
+# Term then prefix
+
+console.log file:handler.ts
+
+==>
+
+Program(AndExpr(Term,PrefixExpr(FileExpr)))
+
+# Multiple prefixes and terms
+
+lang:typescript function file:handler.ts
+
+==>
+
+Program(AndExpr(PrefixExpr(LangExpr),Term,PrefixExpr(FileExpr)))
+
+# Prefix with regex pattern
+
+file:[a-z]+\.test\.js
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# Content with spaces in value (no quotes)
+
+content:hello
+
+==>
+
+Program(PrefixExpr(ContentExpr))
+
+# Revision with slashes
+
+rev:feature/new-feature
+
+==>
+
+Program(PrefixExpr(RevisionExpr))
+
+# RepoSet with multiple repos
+
+reposet:repo1,repo2,repo3
+
+==>
+
+Program(PrefixExpr(RepoSetExpr))
+
+# Symbol with dots
+
+sym:package.Class.method
+
+==>
+
+Program(PrefixExpr(SymExpr))
+
+# Lang with various languages
+
+lang:python
+
+==>
+
+Program(PrefixExpr(LangExpr))
+
+# Archived prefix - yes
+
+archived:yes
+
+==>
+
+Program(PrefixExpr(ArchivedExpr))
+
+# Archived prefix - invalid value (error case)
+
+archived:invalid
+
+==>
+
+Program(AndExpr(PrefixExpr(ArchivedExpr(⚠)),Term))
+
+# Fork prefix - no
+
+fork:no
+
+==>
+
+Program(PrefixExpr(ForkExpr))
+
+# Fork prefix - invalid value (error case)
+
+fork:invalid
+
+==>
+
+Program(AndExpr(PrefixExpr(ForkExpr(⚠)),Term))
+
+# Visibility prefix - private
+
+visibility:private
+
+==>
+
+Program(PrefixExpr(VisibilityExpr))
+
+# Visibility prefix - any
+
+visibility:any
+
+==>
+
+Program(PrefixExpr(VisibilityExpr))
+
+# Visibility prefix - invalid value (error case)
+
+visibility:invalid
+
+==>
+
+Program(AndExpr(PrefixExpr(VisibilityExpr(⚠)),Term))
+
+# File with dashes
+
+file:my-component.tsx
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# Repo with numbers
+
+repo:project123
+
+==>
+
+Program(PrefixExpr(RepoExpr))
+
+# Content with special chars
+
+content:@Component
+
+==>
+
+Program(PrefixExpr(ContentExpr))
+
+# Context with underscores
+
+context:data_engineering
+
+==>
+
+Program(PrefixExpr(ContextExpr))
+
+# Prefix in parentheses
+
+(file:test.js)
+
+==>
+
+Program(ParenExpr(PrefixExpr(FileExpr)))
+
+# Multiple prefixes in group
+
+(file:*.ts lang:typescript)
+
+==>
+
+Program(ParenExpr(AndExpr(PrefixExpr(FileExpr),PrefixExpr(LangExpr))))
+
diff --git a/packages/queryLanguage/test/quoted.txt b/packages/queryLanguage/test/quoted.txt
new file mode 100644
index 000000000..088ed0d3b
--- /dev/null
+++ b/packages/queryLanguage/test/quoted.txt
@@ -0,0 +1,479 @@
+# Simple quoted string
+
+"hello"
+
+==>
+
+Program(Term)
+
+# Quoted string with spaces
+
+"hello world"
+
+==>
+
+Program(Term)
+
+# Multiple words in quotes
+
+"this is a search term"
+
+==>
+
+Program(Term)
+
+# Quoted string with escaped quote
+
+"hello \"world\""
+
+==>
+
+Program(Term)
+
+# Quoted string with escaped backslash
+
+"path\\to\\file"
+
+==>
+
+Program(Term)
+
+# Double backslash
+
+"test\\\\path"
+
+==>
+
+Program(Term)
+
+# Multiple escaped quotes
+
+"\"quoted\" \"words\""
+
+==>
+
+Program(Term)
+
+# Mixed escaped characters
+
+"test\\nvalue\"quoted"
+
+==>
+
+Program(Term)
+
+# Empty quoted string
+
+""
+
+==>
+
+Program(Term)
+
+# Quoted string with only spaces
+
+" "
+
+==>
+
+Program(Term)
+
+# Quoted string in file prefix
+
+file:"my file.txt"
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# Quoted string in repo prefix
+
+repo:"github.com/user/repo name"
+
+==>
+
+Program(PrefixExpr(RepoExpr))
+
+# Quoted string in content prefix
+
+content:"console.log"
+
+==>
+
+Program(PrefixExpr(ContentExpr))
+
+# Quoted string in revision prefix
+
+rev:"feature/my feature"
+
+==>
+
+Program(PrefixExpr(RevisionExpr))
+
+# Multiple quoted strings
+
+"first string" "second string"
+
+==>
+
+Program(AndExpr(Term,Term))
+
+# Quoted and unquoted mixed
+
+unquoted "quoted string" another
+
+==>
+
+Program(AndExpr(Term,Term,Term))
+
+# Quoted string with parentheses inside
+
+"(test)"
+
+==>
+
+Program(Term)
+
+# Quoted string with brackets
+
+"[a-z]+"
+
+==>
+
+Program(Term)
+
+# Quoted string with special chars
+
+"test@example.com"
+
+==>
+
+Program(Term)
+
+# Quoted string with colons
+
+"key:value"
+
+==>
+
+Program(Term)
+
+# Quoted string with dashes
+
+"test-case-example"
+
+==>
+
+Program(Term)
+
+# Quoted string with dots
+
+"com.example.package"
+
+==>
+
+Program(Term)
+
+# Quoted string with regex pattern
+
+"\\w+\\s*=\\s*\\d+"
+
+==>
+
+Program(Term)
+
+# Quoted string with forward slashes
+
+"path/to/file"
+
+==>
+
+Program(Term)
+
+# Quoted string with underscores
+
+"my_variable_name"
+
+==>
+
+Program(Term)
+
+# Quoted string with numbers
+
+"test123"
+
+==>
+
+Program(Term)
+
+# Quoted string with mixed case
+
+"CamelCaseTest"
+
+==>
+
+Program(Term)
+
+# Quoted prefix value with spaces
+
+file:"test file.js"
+
+==>
+
+Program(PrefixExpr(FileExpr))
+
+# Multiple prefixes with quoted values
+
+file:"my file.txt" repo:"my repo"
+
+==>
+
+Program(AndExpr(PrefixExpr(FileExpr),PrefixExpr(RepoExpr)))
+
+# Quoted string in parentheses
+
+("quoted term")
+
+==>
+
+Program(ParenExpr(Term))
+
+# Multiple quoted in parentheses
+
+("first" "second")
+
+==>
+
+Program(ParenExpr(AndExpr(Term,Term)))
+
+# Quoted with escaped newline
+
+"line1\\nline2"
+
+==>
+
+Program(Term)
+
+# Quoted with tab character
+
+"value\\ttab"
+
+==>
+
+Program(Term)
+
+# Lang prefix with quoted value
+
+lang:"objective-c"
+
+==>
+
+Program(PrefixExpr(LangExpr))
+
+# Sym prefix with quoted value
+
+sym:"My Class"
+
+==>
+
+Program(PrefixExpr(SymExpr))
+
+# Content with quoted phrase
+
+content:"TODO: fix this"
+
+==>
+
+Program(PrefixExpr(ContentExpr))
+
+# Quoted string with at symbol
+
+"@decorator"
+
+==>
+
+Program(Term)
+
+# Quoted string with hash
+
+"#define"
+
+==>
+
+Program(Term)
+
+# Quoted string with dollar sign
+
+"$variable"
+
+==>
+
+Program(Term)
+
+# Quoted string with percent
+
+"100%"
+
+==>
+
+Program(Term)
+
+# Quoted string with ampersand
+
+"foo&bar"
+
+==>
+
+Program(Term)
+
+# Quoted string with asterisk
+
+"test*"
+
+==>
+
+Program(Term)
+
+# Quoted string with plus
+
+"a+b"
+
+==>
+
+Program(Term)
+
+# Quoted string with equals
+
+"a=b"
+
+==>
+
+Program(Term)
+
+# Quoted string with angle brackets
+
+""
+
+==>
+
+Program(Term)
+
+# Quoted string with pipe
+
+"a|b"
+
+==>
+
+Program(Term)
+
+# Quoted string with tilde
+
+"~/.config"
+
+==>
+
+Program(Term)
+
+# Quoted string with backtick
+
+"`code`"
+
+==>
+
+Program(Term)
+
+# Quoted string with question mark
+
+"what?"
+
+==>
+
+Program(Term)
+
+# Quoted string with exclamation
+
+"important!"
+
+==>
+
+Program(Term)
+
+# Quoted string with semicolon
+
+"stmt;"
+
+==>
+
+Program(Term)
+
+# Quoted string with comma
+
+"a,b,c"
+
+==>
+
+Program(Term)
+
+# Multiple quotes in content
+
+content:"function \"test\" {"
+
+==>
+
+Program(PrefixExpr(ContentExpr))
+
+# Quoted prefix keyword becomes literal
+
+"repo:hello"
+
+==>
+
+Program(Term)
+
+# Quoted file prefix as literal
+
+"file:test.js"
+
+==>
+
+Program(Term)
+
+# Quoted lang prefix as literal
+
+"lang:python"
+
+==>
+
+Program(Term)
+
+# Quoted partial prefix
+
+"repo:"
+
+==>
+
+Program(Term)
+
+# Mix of quoted prefix and real prefix
+
+"repo:test" file:actual.js
+
+==>
+
+Program(AndExpr(Term,PrefixExpr(FileExpr)))
+
+# Quoted short form prefix
+
+"f:test"
+
+==>
+
+Program(Term)
+
+# Quoted revision prefix
+
+"rev:main"
+
+==>
+
+Program(Term)
diff --git a/packages/queryLanguage/tsconfig.json b/packages/queryLanguage/tsconfig.json
new file mode 100644
index 000000000..af60924ef
--- /dev/null
+++ b/packages/queryLanguage/tsconfig.json
@@ -0,0 +1,23 @@
+{
+ "compilerOptions": {
+ "target": "ES2022",
+ "module": "Node16",
+ "moduleResolution": "Node16",
+ "lib": ["ES2023"],
+ "outDir": "dist",
+ "rootDir": "src",
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "strict": true,
+ "noImplicitAny": true,
+ "strictNullChecks": true,
+ "esModuleInterop": true,
+ "forceConsistentCasingInFileNames": true,
+ "skipLibCheck": true,
+ "isolatedModules": true,
+ "resolveJsonModule": true
+ },
+ "include": ["src/index.ts"],
+ "exclude": ["node_modules", "dist"]
+ }
\ No newline at end of file
diff --git a/packages/queryLanguage/vitest.config.ts b/packages/queryLanguage/vitest.config.ts
new file mode 100644
index 000000000..7c052526f
--- /dev/null
+++ b/packages/queryLanguage/vitest.config.ts
@@ -0,0 +1,8 @@
+import { defineConfig } from 'vitest/config';
+
+export default defineConfig({
+ test: {
+ environment: 'node',
+ watch: false,
+ }
+});
\ No newline at end of file
diff --git a/packages/web/.eslintignore b/packages/web/.eslintignore
index b1a300673..820122622 100644
--- a/packages/web/.eslintignore
+++ b/packages/web/.eslintignore
@@ -1,3 +1,4 @@
# shadcn components
src/components/
-next-env.d.ts
\ No newline at end of file
+next-env.d.ts
+src/proto/**
\ No newline at end of file
diff --git a/packages/web/package.json b/packages/web/package.json
index ba3c3dbc4..a4f580113 100644
--- a/packages/web/package.json
+++ b/packages/web/package.json
@@ -8,6 +8,7 @@
"start": "next start",
"lint": "cross-env SKIP_ENV_VALIDATION=1 eslint .",
"test": "cross-env SKIP_ENV_VALIDATION=1 vitest",
+ "generate:protos": "proto-loader-gen-types --includeComments --longs=Number --enums=String --defaults --oneofs --grpcLib=@grpc/grpc-js --keepCase --includeDirs=../../vendor/zoekt/grpc/protos --outDir=src/proto zoekt/webserver/v1/webserver.proto zoekt/webserver/v1/query.proto",
"dev:emails": "email dev --dir ./src/emails",
"stripe:listen": "stripe listen --forward-to http://localhost:3000/api/stripe"
},
@@ -52,6 +53,8 @@
"@codemirror/state": "^6.4.1",
"@codemirror/view": "^6.33.0",
"@floating-ui/react": "^0.27.2",
+ "@grpc/grpc-js": "^1.14.1",
+ "@grpc/proto-loader": "^0.8.0",
"@hookform/resolvers": "^3.9.0",
"@iconify/react": "^5.1.0",
"@iizukak/codemirror-lang-wgsl": "^0.3.0",
@@ -91,6 +94,7 @@
"@shopify/lang-jsonc": "^1.0.0",
"@sourcebot/codemirror-lang-tcl": "^1.0.12",
"@sourcebot/db": "workspace:*",
+ "@sourcebot/query-language": "workspace:*",
"@sourcebot/schemas": "workspace:*",
"@sourcebot/shared": "workspace:*",
"@ssddanbrown/codemirror-lang-twig": "^1.0.0",
diff --git a/packages/web/src/actions.ts b/packages/web/src/actions.ts
index e194f808a..cc239fa9d 100644
--- a/packages/web/src/actions.ts
+++ b/packages/web/src/actions.ts
@@ -48,6 +48,10 @@ export const sew = async (fn: () => Promise): Promise =>
Sentry.captureException(e);
logger.error(e);
+ if (e instanceof ServiceErrorException) {
+ return e.serviceError;
+ }
+
if (e instanceof Error) {
return unexpectedError(e.message);
}
diff --git a/packages/web/src/app/[domain]/browse/layout.tsx b/packages/web/src/app/[domain]/browse/layout.tsx
index 6807a38fa..d8c7efd20 100644
--- a/packages/web/src/app/[domain]/browse/layout.tsx
+++ b/packages/web/src/app/[domain]/browse/layout.tsx
@@ -29,7 +29,9 @@ export default function Layout({
>
diff --git a/packages/web/src/app/[domain]/components/lightweightCodeHighlighter.tsx b/packages/web/src/app/[domain]/components/lightweightCodeHighlighter.tsx
index bb5912eaa..f72e64854 100644
--- a/packages/web/src/app/[domain]/components/lightweightCodeHighlighter.tsx
+++ b/packages/web/src/app/[domain]/components/lightweightCodeHighlighter.tsx
@@ -6,7 +6,7 @@ import { memo, useEffect, useMemo, useState } from 'react'
import { useCodeMirrorHighlighter } from '@/hooks/useCodeMirrorHighlighter'
import tailwind from '@/tailwind'
import { measure } from '@/lib/utils'
-import { SourceRange } from '@/features/search/types'
+import { SourceRange } from '@/features/search'
// Define a plain text language
const plainTextLanguage = StreamLanguage.define({
diff --git a/packages/web/src/app/[domain]/components/pathHeader.tsx b/packages/web/src/app/[domain]/components/pathHeader.tsx
index d65d2c35c..7d373b2e7 100644
--- a/packages/web/src/app/[domain]/components/pathHeader.tsx
+++ b/packages/web/src/app/[domain]/components/pathHeader.tsx
@@ -233,7 +233,7 @@ export const PathHeader = ({
}}
>
@
- {`${branchDisplayName}`}
+ {`${branchDisplayName.replace(/^refs\/(heads|tags)\//, '')}`}
)}
·
diff --git a/packages/web/src/app/[domain]/components/searchBar/constants.ts b/packages/web/src/app/[domain]/components/searchBar/constants.ts
index c637bee9f..ea93cee87 100644
--- a/packages/web/src/app/[domain]/components/searchBar/constants.ts
+++ b/packages/web/src/app/[domain]/components/searchBar/constants.ts
@@ -16,57 +16,53 @@ export enum SearchPrefix {
sym = "sym:",
content = "content:",
archived = "archived:",
- case = "case:",
fork = "fork:",
- public = "public:",
+ visibility = "visibility:",
context = "context:",
}
-export const publicModeSuggestions: Suggestion[] = [
+export const visibilityModeSuggestions: Suggestion[] = [
{
- value: "yes",
+ value: "public",
description: "Only include results from public repositories."
},
{
- value: "no",
+ value: "private",
description: "Only include results from private repositories."
},
-];
-
-export const forkModeSuggestions: Suggestion[] = [
- {
- value: "yes",
- description: "Only include results from forked repositories."
- },
{
- value: "no",
- description: "Only include results from non-forked repositories."
+ value: "any",
+ description: "Include results from both public and private repositories (default)."
},
];
-export const caseModeSuggestions: Suggestion[] = [
- {
- value: "auto",
- description: "Search patterns are case-insensitive if all characters are lowercase, and case sensitive otherwise (default)."
- },
+export const forkModeSuggestions: Suggestion[] = [
{
value: "yes",
- description: "Case sensitive search."
+ description: "Include results from forked repositories (default)."
},
{
value: "no",
- description: "Case insensitive search."
+ description: "Exclude results from forked repositories."
},
+ {
+ value: "only",
+ description: "Only include results from forked repositories."
+ }
];
export const archivedModeSuggestions: Suggestion[] = [
{
value: "yes",
- description: "Only include results in archived repositories."
+ description: "Include results from archived repositories (default)."
},
{
value: "no",
- description: "Only include results in non-archived repositories."
+ description: "Exclude results from archived repositories."
},
+ {
+ value: "only",
+ description: "Only include results from archived repositories."
+ }
];
diff --git a/packages/web/src/app/[domain]/components/searchBar/searchBar.tsx b/packages/web/src/app/[domain]/components/searchBar/searchBar.tsx
index 6dbd47cd8..9ec5f6647 100644
--- a/packages/web/src/app/[domain]/components/searchBar/searchBar.tsx
+++ b/packages/web/src/app/[domain]/components/searchBar/searchBar.tsx
@@ -42,14 +42,18 @@ import { Separator } from "@/components/ui/separator";
import { Tooltip, TooltipTrigger, TooltipContent } from "@/components/ui/tooltip";
import { Toggle } from "@/components/ui/toggle";
import { useDomain } from "@/hooks/useDomain";
-import { KeyboardShortcutHint } from "@/app/components/keyboardShortcutHint";
import { createAuditAction } from "@/ee/features/audit/actions";
import tailwind from "@/tailwind";
+import { CaseSensitiveIcon, RegexIcon } from "lucide-react";
interface SearchBarProps {
className?: string;
size?: "default" | "sm";
- defaultQuery?: string;
+ defaults?: {
+ isRegexEnabled?: boolean;
+ isCaseSensitivityEnabled?: boolean;
+ query?: string;
+ }
autoFocus?: boolean;
}
@@ -91,8 +95,12 @@ const searchBarContainerVariants = cva(
export const SearchBar = ({
className,
size,
- defaultQuery,
autoFocus,
+ defaults: {
+ isRegexEnabled: defaultIsRegexEnabled = false,
+ isCaseSensitivityEnabled: defaultIsCaseSensitivityEnabled = false,
+ query: defaultQuery = "",
+ } = {}
}: SearchBarProps) => {
const router = useRouter();
const domain = useDomain();
@@ -102,11 +110,13 @@ export const SearchBar = ({
const [isSuggestionsEnabled, setIsSuggestionsEnabled] = useState(false);
const [isSuggestionsBoxFocused, setIsSuggestionsBoxFocused] = useState(false);
const [isHistorySearchEnabled, setIsHistorySearchEnabled] = useState(false);
+ const [isRegexEnabled, setIsRegexEnabled] = useState(defaultIsRegexEnabled);
+ const [isCaseSensitivityEnabled, setIsCaseSensitivityEnabled] = useState(defaultIsCaseSensitivityEnabled);
const focusEditor = useCallback(() => editorRef.current?.view?.focus(), []);
const focusSuggestionsBox = useCallback(() => suggestionBoxRef.current?.focus(), []);
- const [_query, setQuery] = useState(defaultQuery ?? "");
+ const [_query, setQuery] = useState(defaultQuery);
const query = useMemo(() => {
// Replace any newlines with spaces to handle
// copy & pasting text with newlines.
@@ -215,9 +225,11 @@ export const SearchBar = ({
const url = createPathWithQueryParams(`/${domain}/search`,
[SearchQueryParams.query, query],
+ [SearchQueryParams.isRegexEnabled, isRegexEnabled ? "true" : null],
+ [SearchQueryParams.isCaseSensitivityEnabled, isCaseSensitivityEnabled ? "true" : null],
);
router.push(url);
- }, [domain, router]);
+ }, [domain, router, isRegexEnabled, isCaseSensitivityEnabled]);
return (
-
-
-
-
-
-
-
- Focus search bar
-
-
+
+
+
+
+
+
+
+
+
+
+ {isCaseSensitivityEnabled ? "Disable" : "Enable"} case sensitivity
+
+
+
+
+
+
+
+
+
+
+
+ {isRegexEnabled ? "Disable" : "Enable"} regular expressions
+
+
+
{
switch (suggestionMode) {
- case "public":
+ case "visibility":
return {
- list: publicModeSuggestions,
+ list: visibilityModeSuggestions,
onSuggestionClicked: createOnSuggestionClickedHandler(),
}
case "fork":
@@ -147,11 +145,6 @@ const SearchSuggestionsBox = forwardRef(({
list: forkModeSuggestions,
onSuggestionClicked: createOnSuggestionClickedHandler(),
}
- case "case":
- return {
- list: caseModeSuggestions,
- onSuggestionClicked: createOnSuggestionClickedHandler(),
- }
case "archived":
return {
list: archivedModeSuggestions,
@@ -183,7 +176,7 @@ const SearchSuggestionsBox = forwardRef(({
case "file":
return {
list: fileSuggestions,
- onSuggestionClicked: createOnSuggestionClickedHandler(),
+ onSuggestionClicked: createOnSuggestionClickedHandler({ regexEscaped: true }),
isClientSideSearchEnabled: false,
DefaultIcon: VscFile,
}
diff --git a/packages/web/src/app/[domain]/components/searchBar/useRefineModeSuggestions.ts b/packages/web/src/app/[domain]/components/searchBar/useRefineModeSuggestions.ts
index fdc16d504..1c8df2a89 100644
--- a/packages/web/src/app/[domain]/components/searchBar/useRefineModeSuggestions.ts
+++ b/packages/web/src/app/[domain]/components/searchBar/useRefineModeSuggestions.ts
@@ -26,7 +26,7 @@ export const useRefineModeSuggestions = () => {
},
] : []),
{
- value: SearchPrefix.public,
+ value: SearchPrefix.visibility,
description: "Filter on repository visibility."
},
{
@@ -86,10 +86,6 @@ export const useRefineModeSuggestions = () => {
value: SearchPrefix.archived,
description: "Include results from archived repositories.",
},
- {
- value: SearchPrefix.case,
- description: "Control case-sensitivity of search patterns."
- },
{
value: SearchPrefix.fork,
description: "Include only results from forked repositories."
diff --git a/packages/web/src/app/[domain]/components/searchBar/useSuggestionModeMappings.ts b/packages/web/src/app/[domain]/components/searchBar/useSuggestionModeMappings.ts
index da03fd6b5..64e4acfb9 100644
--- a/packages/web/src/app/[domain]/components/searchBar/useSuggestionModeMappings.ts
+++ b/packages/web/src/app/[domain]/components/searchBar/useSuggestionModeMappings.ts
@@ -70,12 +70,6 @@ export const useSuggestionModeMappings = () => {
SearchPrefix.archived
]
},
- {
- suggestionMode: "case",
- prefixes: [
- SearchPrefix.case
- ]
- },
{
suggestionMode: "fork",
prefixes: [
@@ -83,9 +77,9 @@ export const useSuggestionModeMappings = () => {
]
},
{
- suggestionMode: "public",
+ suggestionMode: "visibility",
prefixes: [
- SearchPrefix.public
+ SearchPrefix.visibility
]
},
...(isSearchContextsEnabled ? [
diff --git a/packages/web/src/app/[domain]/components/searchBar/useSuggestionsData.ts b/packages/web/src/app/[domain]/components/searchBar/useSuggestionsData.ts
index 69e1040c0..e22797583 100644
--- a/packages/web/src/app/[domain]/components/searchBar/useSuggestionsData.ts
+++ b/packages/web/src/app/[domain]/components/searchBar/useSuggestionsData.ts
@@ -5,7 +5,7 @@ import { Suggestion, SuggestionMode } from "./searchSuggestionsBox";
import { getRepos, search } from "@/app/api/(client)/client";
import { getSearchContexts } from "@/actions";
import { useMemo } from "react";
-import { SearchSymbol } from "@/features/search/types";
+import { SearchSymbol } from "@/features/search";
import { languageMetadataMap } from "@/lib/languageMetadata";
import {
VscSymbolClass,
diff --git a/packages/web/src/app/[domain]/components/searchBar/zoektLanguageExtension.ts b/packages/web/src/app/[domain]/components/searchBar/zoektLanguageExtension.ts
index 1dad70bc7..6d86a710d 100644
--- a/packages/web/src/app/[domain]/components/searchBar/zoektLanguageExtension.ts
+++ b/packages/web/src/app/[domain]/components/searchBar/zoektLanguageExtension.ts
@@ -47,7 +47,7 @@ export const zoekt = () => {
// Check for prefixes first
// If these match, we return 'keyword'
- if (stream.match(/(archived:|branch:|b:|rev:|c:|case:|content:|f:|file:|fork:|public:|r:|repo:|regex:|lang:|sym:|t:|type:|context:)/)) {
+ if (stream.match(/(archived:|rev:|content:|f:|file:|fork:|visibility:|r:|repo:|regex:|lang:|sym:|t:|type:|context:)/)) {
return t.keyword.toString();
}
diff --git a/packages/web/src/app/[domain]/components/syntaxReferenceGuide.tsx b/packages/web/src/app/[domain]/components/syntaxReferenceGuide.tsx
index 52ec7aea6..39f84ae2c 100644
--- a/packages/web/src/app/[domain]/components/syntaxReferenceGuide.tsx
+++ b/packages/web/src/app/[domain]/components/syntaxReferenceGuide.tsx
@@ -15,6 +15,7 @@ import { useCallback, useRef } from "react";
import { useHotkeys } from "react-hotkeys-hook";
import { useSyntaxGuide } from "./syntaxGuideProvider";
import { CodeSnippet } from "@/app/components/codeSnippet";
+import { ExternalLinkIcon, RegexIcon } from "lucide-react";
const LINGUIST_LINK = "https://github.com/github-linguist/linguist/blob/main/lib/linguist/languages.yml";
const CTAGS_LINK = "https://ctags.io/";
@@ -61,70 +62,92 @@ export const SyntaxReferenceGuide = () => {
onOpenChange={handleOpenChange}
>
- Syntax Reference Guide
+ Syntax Reference Guide
- Queries consist of space-seperated regular expressions. Wrapping expressions in {`""`} combines them. By default, a file must have at least one match for each expression to be included.
+ Queries consist of space-separated search patterns that are matched against file contents. A file must have at least one match for each expression to be included. Queries can optionally contain search filters to further refine the search results.
-
-
-
- Example
- Explanation
-
-
-
-
- foo
- Match files with regex /foo/
-
-
- foo bar
- Match files with regex /foo/ and /bar/
-
-
- {`"foo bar"`}
- Match files with regex /foo bar/
-
-
-
-
-
- {`Multiple expressions can be or'd together with `}or , negated with - , or grouped with () .
-
-
-
-
- Example
- Explanation
-
-
-
-
- foo or bar
- Match files with regex /foo/ or /bar/
-
-
- foo -bar
- Match files with regex /foo/ but not /bar/
-
-
- foo (bar or baz)
- Match files with regex /foo/ and either /bar/ or /baz/
-
-
-
+
+
Keyword search (default)
+
+ Keyword search matches search patterns exactly in file contents. Wrapping search patterns in {`""`} combines them as a single expression.
+
+
+
+
+ Example
+ Explanation
+
+
+
+
+ foo
+ Match files containing the keyword foo
+
+
+ foo bar
+ Match files containing both foo and bar
+
+
+ {`"foo bar"`}
+ Match files containing the phrase foo bar
+
+
+ {'"foo \\"bar\\""'}
+ Match files containing foo "bar" exactly (escaped quotes)
+
+
+
+
+
+
+
+
+
Regex search
+
+ Toggle the button to interpret search patterns as regular expressions.
+
+
+
+
+ Example
+ Explanation
+
+
+
+
+ foo
+ Match files with regex /foo/
+
+
+ foo.*bar
+ Match files with regex /foo.*bar/ (foo followed by any characters, then bar)
+
+
+ {`^function\\s+\\w+`}
+ Match files with regex /^function\s+\w+/ (function at start of line, followed by whitespace and word characters)
+
+
+ {`"foo bar"`}
+ Match files with regex /foo bar/ . Quotes are not matched.
+
+
+
+
-
-
- Expressions can be prefixed with certain keywords to modify search behavior. Some keywords can be negated using the - prefix.
-
+
-
+
+
Search filters
+
+ Search queries (keyword or regex) can include multiple search filters to further refine the search results. Some filters can be negated using the - prefix.
+
+
+
Prefix
@@ -219,7 +242,39 @@ export const SyntaxReferenceGuide = () => {
-
+
+
+
+
+
+
+
Boolean operators & grouping
+
+ By default, space-separated expressions are and'd together. The or keyword and parentheses () can be used to create more complex boolean logic. Parentheses can be negated using the - prefix.
+
+
+
+
+ Example
+ Explanation
+
+
+
+
+ foo or bar
+ Match files containing foo or bar
+
+
+ foo (bar or baz)
+ Match files containing foo and either bar or baz .
+
+
+ -(foo) bar
+ Match files containing bar and not foo .
+
+
+
+
)
diff --git a/packages/web/src/app/[domain]/search/components/codePreviewPanel/codePreview.tsx b/packages/web/src/app/[domain]/search/components/codePreviewPanel/codePreview.tsx
index 2d2eadbca..e62917ced 100644
--- a/packages/web/src/app/[domain]/search/components/codePreviewPanel/codePreview.tsx
+++ b/packages/web/src/app/[domain]/search/components/codePreviewPanel/codePreview.tsx
@@ -3,7 +3,7 @@
import { EditorContextMenu } from "@/app/[domain]/components/editorContextMenu";
import { Button } from "@/components/ui/button";
import { ScrollArea } from "@/components/ui/scroll-area";
-import { SearchResultChunk } from "@/features/search/types";
+import { SearchResultChunk } from "@/features/search";
import { useCodeMirrorTheme } from "@/hooks/useCodeMirrorTheme";
import { useKeymapExtension } from "@/hooks/useKeymapExtension";
import { useCodeMirrorLanguageExtension } from "@/hooks/useCodeMirrorLanguageExtension";
diff --git a/packages/web/src/app/[domain]/search/components/codePreviewPanel/index.tsx b/packages/web/src/app/[domain]/search/components/codePreviewPanel/index.tsx
index d1a7e66f0..d19b15b90 100644
--- a/packages/web/src/app/[domain]/search/components/codePreviewPanel/index.tsx
+++ b/packages/web/src/app/[domain]/search/components/codePreviewPanel/index.tsx
@@ -2,7 +2,7 @@
import { useQuery } from "@tanstack/react-query";
import { CodePreview } from "./codePreview";
-import { SearchResultFile } from "@/features/search/types";
+import { SearchResultFile } from "@/features/search";
import { SymbolIcon } from "@radix-ui/react-icons";
import { SetStateAction, Dispatch, useMemo } from "react";
import { unwrapServiceError } from "@/lib/utils";
diff --git a/packages/web/src/app/[domain]/search/components/filterPanel/filter.tsx b/packages/web/src/app/[domain]/search/components/filterPanel/filter.tsx
index 82db01276..0a76a50e6 100644
--- a/packages/web/src/app/[domain]/search/components/filterPanel/filter.tsx
+++ b/packages/web/src/app/[domain]/search/components/filterPanel/filter.tsx
@@ -5,6 +5,7 @@ import { compareEntries, Entry } from "./entry";
import { Input } from "@/components/ui/input";
import Fuse from "fuse.js";
import { cn } from "@/lib/utils"
+import { Skeleton } from "@/components/ui/skeleton";
interface FilterProps {
title: string,
@@ -12,6 +13,7 @@ interface FilterProps {
entries: Entry[],
onEntryClicked: (key: string) => void,
className?: string,
+ isStreaming: boolean,
}
export const Filter = ({
@@ -20,6 +22,7 @@ export const Filter = ({
entries,
onEntryClicked,
className,
+ isStreaming,
}: FilterProps) => {
const [searchFilter, setSearchFilter] = useState("");
@@ -43,27 +46,34 @@ export const Filter = ({
className
)}>
{title}
-
- setSearchFilter(event.target.value)}
- />
-
-
-
+
+
+ {filteredEntries
+ .sort((entryA, entryB) => compareEntries(entryB, entryA))
+ .map((entry) => (
+ onEntryClicked(entry.key)}
+ />
+ ))}
+
+ >
+ )}
+
)
}
diff --git a/packages/web/src/app/[domain]/search/components/filterPanel/index.tsx b/packages/web/src/app/[domain]/search/components/filterPanel/index.tsx
index 231cda184..f3155f79f 100644
--- a/packages/web/src/app/[domain]/search/components/filterPanel/index.tsx
+++ b/packages/web/src/app/[domain]/search/components/filterPanel/index.tsx
@@ -1,7 +1,7 @@
'use client';
import { FileIcon } from "@/components/ui/fileIcon";
-import { RepositoryInfo, SearchResultFile } from "@/features/search/types";
+import { RepositoryInfo, SearchResultFile } from "@/features/search";
import { cn, getCodeHostInfoForRepo } from "@/lib/utils";
import { LaptopIcon } from "@radix-ui/react-icons";
import Image from "next/image";
@@ -15,6 +15,8 @@ import { useGetSelectedFromQuery } from "./useGetSelectedFromQuery";
interface FilePanelProps {
matches: SearchResultFile[];
repoInfo: Record;
+ onFilterChange?: () => void;
+ isStreaming: boolean;
}
/**
@@ -31,10 +33,14 @@ interface FilePanelProps {
*
* @param matches - Array of search result files to filter
* @param repoInfo - Information about repositories including their display names and icons
+ * @param onFilterChange - Optional callback that is called whenever a filter is applied or removed
+ * @param isStreaming - Whether the search is streaming
*/
export const FilterPanel = ({
matches,
repoInfo,
+ onFilterChange,
+ isStreaming,
}: FilePanelProps) => {
const router = useRouter();
const searchParams = useSearchParams();
@@ -148,9 +154,11 @@ export const FilterPanel = ({
if (newParams.toString() !== searchParams.toString()) {
router.replace(`?${newParams.toString()}`, { scroll: false });
+ onFilterChange?.();
}
}}
className="max-h-[50%]"
+ isStreaming={isStreaming}
/>
)
diff --git a/packages/web/src/app/[domain]/search/components/filterPanel/useFilterMatches.ts b/packages/web/src/app/[domain]/search/components/filterPanel/useFilterMatches.ts
index 5951d8ea4..6a66f97a1 100644
--- a/packages/web/src/app/[domain]/search/components/filterPanel/useFilterMatches.ts
+++ b/packages/web/src/app/[domain]/search/components/filterPanel/useFilterMatches.ts
@@ -1,6 +1,6 @@
'use client';
-import { SearchResultFile } from "@/features/search/types";
+import { SearchResultFile } from "@/features/search";
import { useMemo } from "react";
import { useGetSelectedFromQuery } from "./useGetSelectedFromQuery";
diff --git a/packages/web/src/app/[domain]/search/components/searchResultsPage.tsx b/packages/web/src/app/[domain]/search/components/searchResultsPage.tsx
index 9c33e11d4..602082c25 100644
--- a/packages/web/src/app/[domain]/search/components/searchResultsPage.tsx
+++ b/packages/web/src/app/[domain]/search/components/searchResultsPage.tsx
@@ -11,76 +11,76 @@ import {
} from "@/components/ui/resizable";
import { Separator } from "@/components/ui/separator";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
-import { RepositoryInfo, SearchResultFile, SearchStats } from "@/features/search/types";
+import { RepositoryInfo, SearchResultFile, SearchStats } from "@/features/search";
import useCaptureEvent from "@/hooks/useCaptureEvent";
import { useDomain } from "@/hooks/useDomain";
import { useNonEmptyQueryParam } from "@/hooks/useNonEmptyQueryParam";
import { useSearchHistory } from "@/hooks/useSearchHistory";
import { SearchQueryParams } from "@/lib/types";
-import { createPathWithQueryParams, measure, unwrapServiceError } from "@/lib/utils";
-import { InfoCircledIcon, SymbolIcon } from "@radix-ui/react-icons";
-import { useQuery } from "@tanstack/react-query";
+import { createPathWithQueryParams } from "@/lib/utils";
+import { InfoCircledIcon } from "@radix-ui/react-icons";
import { useLocalStorage } from "@uidotdev/usehooks";
-import { AlertTriangleIcon, BugIcon, FilterIcon } from "lucide-react";
+import { AlertTriangleIcon, BugIcon, FilterIcon, RefreshCwIcon } from "lucide-react";
import { useRouter } from "next/navigation";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useHotkeys } from "react-hotkeys-hook";
import { ImperativePanelHandle } from "react-resizable-panels";
-import { search } from "../../../api/(client)/client";
import { CopyIconButton } from "../../components/copyIconButton";
import { SearchBar } from "../../components/searchBar";
import { TopBar } from "../../components/topBar";
+import { useStreamedSearch } from "../useStreamedSearch";
import { CodePreviewPanel } from "./codePreviewPanel";
import { FilterPanel } from "./filterPanel";
import { useFilteredMatches } from "./filterPanel/useFilterMatches";
-import { SearchResultsPanel } from "./searchResultsPanel";
+import { SearchResultsPanel, SearchResultsPanelHandle } from "./searchResultsPanel";
+import { ServiceErrorException } from "@/lib/serviceError";
interface SearchResultsPageProps {
searchQuery: string;
defaultMaxMatchCount: number;
+ isRegexEnabled: boolean;
+ isCaseSensitivityEnabled: boolean;
}
export const SearchResultsPage = ({
searchQuery,
defaultMaxMatchCount,
+ isRegexEnabled,
+ isCaseSensitivityEnabled,
}: SearchResultsPageProps) => {
const router = useRouter();
const { setSearchHistory } = useSearchHistory();
- const captureEvent = useCaptureEvent();
const domain = useDomain();
const { toast } = useToast();
+ const captureEvent = useCaptureEvent();
// Encodes the number of matches to return in the search response.
const _maxMatchCount = parseInt(useNonEmptyQueryParam(SearchQueryParams.matches) ?? `${defaultMaxMatchCount}`);
const maxMatchCount = isNaN(_maxMatchCount) ? defaultMaxMatchCount : _maxMatchCount;
const {
- data: searchResponse,
- isPending: isSearchPending,
- isFetching: isFetching,
- error
- } = useQuery({
- queryKey: ["search", searchQuery, maxMatchCount],
- queryFn: () => measure(() => unwrapServiceError(search({
- query: searchQuery,
- matches: maxMatchCount,
- contextLines: 3,
- whole: false,
- })), "client.search"),
- select: ({ data, durationMs }) => ({
- ...data,
- totalClientSearchDurationMs: durationMs,
- }),
- enabled: searchQuery.length > 0,
- refetchOnWindowFocus: false,
- retry: false,
- staleTime: 0,
+ error,
+ files,
+ repoInfo,
+ timeToSearchCompletionMs,
+ timeToFirstSearchResultMs,
+ isStreaming,
+ numMatches,
+ isExhaustive,
+ stats,
+ } = useStreamedSearch({
+ query: searchQuery,
+ matches: maxMatchCount,
+ contextLines: 3,
+ whole: false,
+ isRegexEnabled,
+ isCaseSensitivityEnabled,
});
useEffect(() => {
if (error) {
toast({
- description: `❌ Search failed. Reason: ${error.message}`,
+ description: `❌ Search failed. Reason: ${error instanceof ServiceErrorException ? error.serviceError.message : error.message}`,
});
}
}, [error, toast]);
@@ -103,38 +103,51 @@ export const SearchResultsPage = ({
}, [searchQuery, setSearchHistory]);
useEffect(() => {
- if (!searchResponse) {
+ if (isStreaming || !stats) {
return;
}
- const fileLanguages = searchResponse.files?.map(file => file.language) || [];
+ const fileLanguages = files.map(file => file.language) || [];
+
+ console.debug('timeToFirstSearchResultMs:', timeToFirstSearchResultMs);
+ console.debug('timeToSearchCompletionMs:', timeToSearchCompletionMs);
captureEvent("search_finished", {
- durationMs: searchResponse.totalClientSearchDurationMs,
- fileCount: searchResponse.stats.fileCount,
- matchCount: searchResponse.stats.totalMatchCount,
- actualMatchCount: searchResponse.stats.actualMatchCount,
- filesSkipped: searchResponse.stats.filesSkipped,
- contentBytesLoaded: searchResponse.stats.contentBytesLoaded,
- indexBytesLoaded: searchResponse.stats.indexBytesLoaded,
- crashes: searchResponse.stats.crashes,
- shardFilesConsidered: searchResponse.stats.shardFilesConsidered,
- filesConsidered: searchResponse.stats.filesConsidered,
- filesLoaded: searchResponse.stats.filesLoaded,
- shardsScanned: searchResponse.stats.shardsScanned,
- shardsSkipped: searchResponse.stats.shardsSkipped,
- shardsSkippedFilter: searchResponse.stats.shardsSkippedFilter,
- ngramMatches: searchResponse.stats.ngramMatches,
- ngramLookups: searchResponse.stats.ngramLookups,
- wait: searchResponse.stats.wait,
- matchTreeConstruction: searchResponse.stats.matchTreeConstruction,
- matchTreeSearch: searchResponse.stats.matchTreeSearch,
- regexpsConsidered: searchResponse.stats.regexpsConsidered,
- flushReason: searchResponse.stats.flushReason,
+ durationMs: timeToSearchCompletionMs,
+ timeToSearchCompletionMs,
+ timeToFirstSearchResultMs,
+ fileCount: stats.fileCount,
+ matchCount: stats.totalMatchCount,
+ actualMatchCount: stats.actualMatchCount,
+ filesSkipped: stats.filesSkipped,
+ contentBytesLoaded: stats.contentBytesLoaded,
+ indexBytesLoaded: stats.indexBytesLoaded,
+ crashes: stats.crashes,
+ shardFilesConsidered: stats.shardFilesConsidered,
+ filesConsidered: stats.filesConsidered,
+ filesLoaded: stats.filesLoaded,
+ shardsScanned: stats.shardsScanned,
+ shardsSkipped: stats.shardsSkipped,
+ shardsSkippedFilter: stats.shardsSkippedFilter,
+ ngramMatches: stats.ngramMatches,
+ ngramLookups: stats.ngramLookups,
+ wait: stats.wait,
+ matchTreeConstruction: stats.matchTreeConstruction,
+ matchTreeSearch: stats.matchTreeSearch,
+ regexpsConsidered: stats.regexpsConsidered,
+ flushReason: stats.flushReason,
fileLanguages,
+ isSearchExhaustive: isExhaustive,
});
- }, [captureEvent, searchQuery, searchResponse]);
-
+ }, [
+ captureEvent,
+ files,
+ isStreaming,
+ isExhaustive,
+ stats,
+ timeToSearchCompletionMs,
+ timeToFirstSearchResultMs,
+ ]);
const onLoadMoreResults = useCallback(() => {
const url = createPathWithQueryParams(`/${domain}/search`,
@@ -144,6 +157,13 @@ export const SearchResultsPage = ({
router.push(url);
}, [maxMatchCount, router, searchQuery, domain]);
+ // Look for any files that are not on the default branch.
+ const isBranchFilteringEnabled = useMemo(() => {
+ return files.some((file) => {
+ return file.branches?.some((branch) => branch !== 'HEAD') ?? false;
+ });
+ }, [files]);
+
return (
{/* TopBar */}
@@ -152,32 +172,32 @@ export const SearchResultsPage = ({
>
- {(isSearchPending || isFetching) ? (
-
- ) : error ? (
+ {error ? (
Failed to search
-
{error.message}
+
{error instanceof ServiceErrorException ? error.serviceError.message : error.message}
) : (
)}
@@ -186,10 +206,11 @@ export const SearchResultsPage = ({
interface PanelGroupProps {
fileMatches: SearchResultFile[];
- isMoreResultsButtonVisible?: boolean;
onLoadMoreResults: () => void;
+ isStreaming: boolean;
+ isMoreResultsButtonVisible?: boolean;
isBranchFilteringEnabled: boolean;
- repoInfo: RepositoryInfo[];
+ repoInfo: Record;
searchDurationMs: number;
numMatches: number;
searchStats?: SearchStats;
@@ -198,9 +219,10 @@ interface PanelGroupProps {
const PanelGroup = ({
fileMatches,
isMoreResultsButtonVisible,
+ isStreaming,
onLoadMoreResults,
isBranchFilteringEnabled,
- repoInfo: _repoInfo,
+ repoInfo,
searchDurationMs: _searchDurationMs,
numMatches,
searchStats,
@@ -208,6 +230,7 @@ const PanelGroup = ({
const [previewedFile, setPreviewedFile] = useState(undefined);
const filteredFileMatches = useFilteredMatches(fileMatches);
const filterPanelRef = useRef(null);
+ const searchResultsPanelRef = useRef<SearchResultsPanelHandle>(null);
const [selectedMatchIndex, setSelectedMatchIndex] = useState(0);
const [isFilterPanelCollapsed, setIsFilterPanelCollapsed] = useLocalStorage('isFilterPanelCollapsed', false);
@@ -228,13 +251,6 @@ const PanelGroup = ({
return Math.round(_searchDurationMs);
}, [_searchDurationMs]);
- const repoInfo = useMemo(() => {
- return _repoInfo.reduce((acc, repo) => {
- acc[repo.id] = repo;
- return acc;
- }, {} as Record);
- }, [_repoInfo]);
-
return (
{
+ searchResultsPanelRef.current?.resetScroll();
+ }}
/>
{isFilterPanelCollapsed && (
@@ -291,45 +311,58 @@ const PanelGroup = ({
order={2}
>
-
-
-
-
-
-
-
-
Search stats for nerds
-
{
- navigator.clipboard.writeText(JSON.stringify(searchStats, null, 2));
- return true;
- }}
- className="ml-auto"
- />
-
-
- {JSON.stringify(searchStats, null, 2)}
-
-
-
- {
- fileMatches.length > 0 ? (
-
{`[${searchDurationMs} ms] Found ${numMatches} matches in ${fileMatches.length} ${fileMatches.length > 1 ? 'files' : 'file'}`}
- ) : (
-
No results
- )
- }
- {isMoreResultsButtonVisible && (
-
- (load more)
-
+ {isStreaming ? (
+ <>
+
+
Searching...
+ {numMatches > 0 && (
+
{`Found ${numMatches} matches in ${fileMatches.length} ${fileMatches.length > 1 ? 'files' : 'file'}`}
+ )}
+ >
+ ) : (
+ <>
+
+
+
+
+
+
+
+
Search stats for nerds
+
{
+ navigator.clipboard.writeText(JSON.stringify(searchStats, null, 2));
+ return true;
+ }}
+ className="ml-auto"
+ />
+
+
+ {JSON.stringify(searchStats, null, 2)}
+
+
+
+ {
+ fileMatches.length > 0 ? (
+
{`[${searchDurationMs} ms] Found ${numMatches} matches in ${fileMatches.length} ${fileMatches.length > 1 ? 'files' : 'file'}`}
+ ) : (
+
No results
+ )
+ }
+ {isMoreResultsButtonVisible && (
+
+ (load more)
+
+ )}
+ >
)}
{filteredFileMatches.length > 0 ? (
{
setSelectedMatchIndex(matchIndex ?? 0);
@@ -340,6 +373,11 @@ const PanelGroup = ({
isBranchFilteringEnabled={isBranchFilteringEnabled}
repoInfo={repoInfo}
/>
+ ) : isStreaming ? (
+
) : (
No results found
diff --git a/packages/web/src/app/[domain]/search/components/searchResultsPanel/fileMatch.tsx b/packages/web/src/app/[domain]/search/components/searchResultsPanel/fileMatch.tsx
index d6e6b8ab0..334333927 100644
--- a/packages/web/src/app/[domain]/search/components/searchResultsPanel/fileMatch.tsx
+++ b/packages/web/src/app/[domain]/search/components/searchResultsPanel/fileMatch.tsx
@@ -1,6 +1,6 @@
'use client';
-import { SearchResultFile, SearchResultChunk } from "@/features/search/types";
+import { SearchResultFile, SearchResultChunk } from "@/features/search";
import { LightweightCodeHighlighter } from "@/app/[domain]/components/lightweightCodeHighlighter";
import Link from "next/link";
import { getBrowsePath } from "@/app/[domain]/browse/hooks/utils";
diff --git a/packages/web/src/app/[domain]/search/components/searchResultsPanel/fileMatchContainer.tsx b/packages/web/src/app/[domain]/search/components/searchResultsPanel/fileMatchContainer.tsx
index b10d656a3..2779b301c 100644
--- a/packages/web/src/app/[domain]/search/components/searchResultsPanel/fileMatchContainer.tsx
+++ b/packages/web/src/app/[domain]/search/components/searchResultsPanel/fileMatchContainer.tsx
@@ -5,7 +5,7 @@ import { Separator } from "@/components/ui/separator";
import { DoubleArrowDownIcon, DoubleArrowUpIcon } from "@radix-ui/react-icons";
import { useMemo } from "react";
import { FileMatch } from "./fileMatch";
-import { RepositoryInfo, SearchResultFile } from "@/features/search/types";
+import { RepositoryInfo, SearchResultFile } from "@/features/search";
import { Button } from "@/components/ui/button";
export const MAX_MATCHES_TO_PREVIEW = 3;
@@ -75,7 +75,7 @@ export const FileMatchContainer = ({
}
return `${branches[0]}${branches.length > 1 ? ` +${branches.length - 1}` : ''}`;
- }, [isBranchFilteringEnabled, branches]);
+ }, [branches, isBranchFilteringEnabled]);
const repo = useMemo(() => {
return repoInfo[file.repositoryId];
diff --git a/packages/web/src/app/[domain]/search/components/searchResultsPanel/index.tsx b/packages/web/src/app/[domain]/search/components/searchResultsPanel/index.tsx
index 61e413322..b2ee8e9e7 100644
--- a/packages/web/src/app/[domain]/search/components/searchResultsPanel/index.tsx
+++ b/packages/web/src/app/[domain]/search/components/searchResultsPanel/index.tsx
@@ -1,10 +1,11 @@
'use client';
-import { RepositoryInfo, SearchResultFile } from "@/features/search/types";
-import { FileMatchContainer, MAX_MATCHES_TO_PREVIEW } from "./fileMatchContainer";
+import { RepositoryInfo, SearchResultFile } from "@/features/search";
import { useVirtualizer, VirtualItem } from "@tanstack/react-virtual";
-import { useCallback, useEffect, useRef, useState } from "react";
-import { useDebounce, usePrevious } from "@uidotdev/usehooks";
+import { useDebounce } from "@uidotdev/usehooks";
+import { forwardRef, useCallback, useEffect, useImperativeHandle, useRef } from "react";
+import { useMap } from "usehooks-ts";
+import { FileMatchContainer, MAX_MATCHES_TO_PREVIEW } from "./fileMatchContainer";
interface SearchResultsPanelProps {
fileMatches: SearchResultFile[];
@@ -15,6 +16,10 @@ interface SearchResultsPanelProps {
repoInfo: Record
;
}
+export interface SearchResultsPanelHandle {
+ resetScroll: () => void;
+}
+
const ESTIMATED_LINE_HEIGHT_PX = 20;
const ESTIMATED_NUMBER_OF_LINES_PER_CODE_CELL = 10;
const ESTIMATED_MATCH_CONTAINER_HEIGHT_PX = 30;
@@ -22,17 +27,25 @@ const ESTIMATED_MATCH_CONTAINER_HEIGHT_PX = 30;
type ScrollHistoryState = {
scrollOffset?: number;
measurementsCache?: VirtualItem[];
- showAllMatchesStates?: boolean[];
+ showAllMatchesMap?: [string, boolean][];
+}
+
+/**
+ * Unique key for a file match, used to store its "show all matches" state
+ * across renders and history restores.
+ */
+const getFileMatchKey = (fileMatch: SearchResultFile) => {
+ return `${fileMatch.repository}-${fileMatch.fileName.text}`;
}
-export const SearchResultsPanel = ({
+export const SearchResultsPanel = forwardRef<SearchResultsPanelHandle, SearchResultsPanelProps>(({
fileMatches,
onOpenFilePreview,
isLoadMoreButtonVisible,
onLoadMoreButtonClicked,
isBranchFilteringEnabled,
repoInfo,
-}: SearchResultsPanelProps) => {
+}, ref) => {
const parentRef = useRef(null);
// Restore the scroll offset, measurements cache, and other state from the history
@@ -42,17 +55,17 @@ export const SearchResultsPanel = ({
const {
scrollOffset: restoreOffset,
measurementsCache: restoreMeasurementsCache,
- showAllMatchesStates: restoreShowAllMatchesStates,
- } = history.state as ScrollHistoryState;
+ showAllMatchesMap: restoreShowAllMatchesStates,
+ } = (history.state ?? {}) as ScrollHistoryState;
- const [showAllMatchesStates, setShowAllMatchesStates] = useState(restoreShowAllMatchesStates || Array(fileMatches.length).fill(false));
+ const [showAllMatchesMap, showAllMatchesActions] = useMap(restoreShowAllMatchesStates || []);
const virtualizer = useVirtualizer({
count: fileMatches.length,
getScrollElement: () => parentRef.current,
estimateSize: (index) => {
const fileMatch = fileMatches[index];
- const showAllMatches = showAllMatchesStates[index];
+ const showAllMatches = showAllMatchesMap.get(getFileMatchKey(fileMatch));
// Quick guesstimation ;) This needs to be quick since the virtualizer will
// run this upfront for all items in the list.
@@ -73,38 +86,33 @@ export const SearchResultsPanel = ({
debug: false,
});
- // When the number of file matches changes, we need to reset our scroll state.
- const prevFileMatches = usePrevious(fileMatches);
- useEffect(() => {
- if (!prevFileMatches) {
- return;
- }
+ const resetScroll = useCallback(() => {
+ virtualizer.scrollToIndex(0);
+ }, [virtualizer]);
+
+ // Expose the resetScroll function to parent components
+ useImperativeHandle(ref, () => ({
+ resetScroll,
+ }), [resetScroll]);
- if (prevFileMatches.length !== fileMatches.length) {
- setShowAllMatchesStates(Array(fileMatches.length).fill(false));
- virtualizer.scrollToIndex(0);
- }
- }, [fileMatches.length, prevFileMatches, virtualizer]);
// Save the scroll state to the history stack.
- const debouncedScrollOffset = useDebounce(virtualizer.scrollOffset, 100);
+ const debouncedScrollOffset = useDebounce(virtualizer.scrollOffset, 500);
useEffect(() => {
history.replaceState(
{
scrollOffset: debouncedScrollOffset ?? undefined,
measurementsCache: virtualizer.measurementsCache,
- showAllMatchesStates,
+ showAllMatchesMap: Array.from(showAllMatchesMap.entries()),
} satisfies ScrollHistoryState,
'',
window.location.href
);
- }, [debouncedScrollOffset, virtualizer.measurementsCache, showAllMatchesStates]);
+ }, [debouncedScrollOffset, virtualizer.measurementsCache, showAllMatchesMap]);
- const onShowAllMatchesButtonClicked = useCallback((index: number) => {
- const states = [...showAllMatchesStates];
- const wasShown = states[index];
- states[index] = !wasShown;
- setShowAllMatchesStates(states);
+ const onShowAllMatchesButtonClicked = useCallback((fileMatchKey: string, index: number) => {
+ const wasShown = showAllMatchesMap.get(fileMatchKey) ?? false;
+ showAllMatchesActions.set(fileMatchKey, !wasShown);
// When collapsing, scroll to the top of the file match container. This ensures
// that the focused "show fewer matches" button is visible.
@@ -113,7 +121,7 @@ export const SearchResultsPanel = ({
align: 'start'
});
}
- }, [showAllMatchesStates, virtualizer]);
+ }, [showAllMatchesActions, showAllMatchesMap, virtualizer]);
return (
@@ -153,9 +161,9 @@ export const SearchResultsPanel = ({
onOpenFilePreview={(matchIndex) => {
onOpenFilePreview(file, matchIndex);
}}
- showAllMatches={showAllMatchesStates[virtualRow.index]}
+ showAllMatches={showAllMatchesMap.get(getFileMatchKey(file)) ?? false}
onShowAllMatchesButtonClicked={() => {
- onShowAllMatchesButtonClicked(virtualRow.index);
+ onShowAllMatchesButtonClicked(getFileMatchKey(file), virtualRow.index);
}}
isBranchFilteringEnabled={isBranchFilteringEnabled}
repoInfo={repoInfo}
@@ -177,4 +185,6 @@ export const SearchResultsPanel = ({
)}
)
-}
\ No newline at end of file
+});
+
+SearchResultsPanel.displayName = 'SearchResultsPanel';
diff --git a/packages/web/src/app/[domain]/search/page.tsx b/packages/web/src/app/[domain]/search/page.tsx
index 8677e2872..d1f4e03a7 100644
--- a/packages/web/src/app/[domain]/search/page.tsx
+++ b/packages/web/src/app/[domain]/search/page.tsx
@@ -4,13 +4,19 @@ import { SearchResultsPage } from "./components/searchResultsPage";
interface SearchPageProps {
params: Promise<{ domain: string }>;
- searchParams: Promise<{ query?: string }>;
+ searchParams: Promise<{
+ query?: string;
+ isRegexEnabled?: "true" | "false";
+ isCaseSensitivityEnabled?: "true" | "false";
+ }>;
}
export default async function SearchPage(props: SearchPageProps) {
const { domain } = await props.params;
const searchParams = await props.searchParams;
const query = searchParams?.query;
+ const isRegexEnabled = searchParams?.isRegexEnabled === "true";
+ const isCaseSensitivityEnabled = searchParams?.isCaseSensitivityEnabled === "true";
if (query === undefined || query.length === 0) {
return
@@ -20,6 +26,8 @@ export default async function SearchPage(props: SearchPageProps) {
)
}
diff --git a/packages/web/src/app/[domain]/search/useStreamedSearch.ts b/packages/web/src/app/[domain]/search/useStreamedSearch.ts
new file mode 100644
index 000000000..b4f079fdd
--- /dev/null
+++ b/packages/web/src/app/[domain]/search/useStreamedSearch.ts
@@ -0,0 +1,288 @@
+'use client';
+
+import { RepositoryInfo, SearchRequest, SearchResultFile, SearchStats, StreamedSearchResponse } from '@/features/search';
+import { ServiceErrorException } from '@/lib/serviceError';
+import { isServiceError } from '@/lib/utils';
+import * as Sentry from '@sentry/nextjs';
+import { useCallback, useEffect, useRef, useState } from 'react';
+
+interface CacheEntry {
+ files: SearchResultFile[];
+ repoInfo: Record;
+ numMatches: number;
+ timeToSearchCompletionMs: number;
+ timeToFirstSearchResultMs: number;
+ timestamp: number;
+ isExhaustive: boolean;
+}
+
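+// Completed search results are cached in-memory, keyed by the serialized request params,
+// so identical searches within the TTL are served without re-querying the backend.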
+const searchCache = new Map<string, CacheEntry>();
+const CACHE_TTL = 5 * 60 * 1000;
+
+const createCacheKey = (params: SearchRequest): string => {
+ return JSON.stringify({
+ query: params.query,
+ matches: params.matches,
+ contextLines: params.contextLines,
+ whole: params.whole,
+ isRegexEnabled: params.isRegexEnabled,
+ isCaseSensitivityEnabled: params.isCaseSensitivityEnabled,
+ });
+};
+
+const isCacheValid = (entry: CacheEntry): boolean => {
+ return Date.now() - entry.timestamp < CACHE_TTL;
+};
+
+export const useStreamedSearch = ({ query, matches, contextLines, whole, isRegexEnabled, isCaseSensitivityEnabled }: SearchRequest) => {
+ const [state, setState] = useState<{
+ isStreaming: boolean,
+ isExhaustive: boolean,
+ error: Error | null,
+ files: SearchResultFile[],
+ repoInfo: Record,
+ timeToSearchCompletionMs: number,
+ timeToFirstSearchResultMs: number,
+ numMatches: number,
+ stats?: SearchStats,
+ }>({
+ isStreaming: false,
+ isExhaustive: false,
+ error: null,
+ files: [],
+ repoInfo: {},
+ timeToSearchCompletionMs: 0,
+ timeToFirstSearchResultMs: 0,
+ numMatches: 0,
+ stats: undefined,
+ });
+
+ const abortControllerRef = useRef<AbortController | null>(null);
+
+ const cancel = useCallback(() => {
+ if (abortControllerRef.current) {
+ abortControllerRef.current.abort();
+ abortControllerRef.current = null;
+ }
+ setState(prev => ({
+ ...prev,
+ isStreaming: false,
+ }));
+ }, []);
+
+ useEffect(() => {
+ const search = async () => {
+ const startTime = performance.now();
+
+ if (abortControllerRef.current) {
+ abortControllerRef.current.abort();
+ }
+ abortControllerRef.current = new AbortController();
+
+ const cacheKey = createCacheKey({
+ query,
+ matches,
+ contextLines,
+ whole,
+ isRegexEnabled,
+ isCaseSensitivityEnabled,
+ });
+
+ // Check if we have a valid cached result. If so, use it.
+ const cachedEntry = searchCache.get(cacheKey);
+ if (cachedEntry && isCacheValid(cachedEntry)) {
+ console.debug('Using cached search results');
+ setState({
+ isStreaming: false,
+ isExhaustive: cachedEntry.isExhaustive,
+ error: null,
+ files: cachedEntry.files,
+ repoInfo: cachedEntry.repoInfo,
+ timeToSearchCompletionMs: cachedEntry.timeToSearchCompletionMs,
+ timeToFirstSearchResultMs: cachedEntry.timeToFirstSearchResultMs,
+ numMatches: cachedEntry.numMatches,
+ });
+ return;
+ }
+
+ setState({
+ isStreaming: true,
+ isExhaustive: false,
+ error: null,
+ files: [],
+ repoInfo: {},
+ timeToSearchCompletionMs: 0,
+ timeToFirstSearchResultMs: 0,
+ numMatches: 0,
+ });
+
+ try {
+ const response = await fetch('/api/stream_search', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ query,
+ matches,
+ contextLines,
+ whole,
+ isRegexEnabled,
+ isCaseSensitivityEnabled,
+ }),
+ signal: abortControllerRef.current.signal,
+ });
+
+ if (!response.ok) {
+ // Check if this is a service error response
+ const contentType = response.headers.get('content-type');
+ if (contentType?.includes('application/json')) {
+ const errorData = await response.json();
+ if (isServiceError(errorData)) {
+ throw new ServiceErrorException(errorData);
+ }
+ }
+ throw new Error(`HTTP error! status: ${response.status}`);
+ }
+
+ if (!response.body) {
+ throw new Error('No response body');
+ }
+
+ const reader = response.body.getReader();
+ const decoder = new TextDecoder();
+ let buffer = '';
+ let numMessagesProcessed = 0;
+
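+ // Read the SSE stream until it is exhausted, buffering partial chunks and
+ // processing complete messages as they arrive.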
+ while (true as boolean) {
+ const { done, value } = await reader.read();
+
+ if (done) {
+ break;
+ }
+
+ // Decode the chunk and add to buffer
+ buffer += decoder.decode(value, { stream: true });
+
+ // Process complete SSE messages (separated by \n\n)
+ const messages = buffer.split('\n\n');
+
+ // Keep the last element (potentially incomplete message) in the buffer for the next chunk.
+ // Stream chunks can split messages mid-way, so we only process complete messages.
+ buffer = messages.pop() || '';
+
+ for (const message of messages) {
+ if (!message.trim()) {
+ continue;
+ }
+
+ // SSE messages start with "data: "
+ const dataMatch = message.match(/^data: (.+)$/);
+ if (!dataMatch) {
+ continue;
+ }
+
+ const data = dataMatch[1];
+
+ // Check for completion signal
+ if (data === '[DONE]') {
+ break;
+ }
+
+ const response: StreamedSearchResponse = JSON.parse(data);
+ const isFirstMessage = numMessagesProcessed === 0;
+ switch (response.type) {
+ case 'chunk':
+ setState(prev => ({
+ ...prev,
+ files: [
+ ...prev.files,
+ ...response.files
+ ],
+ repoInfo: {
+ ...prev.repoInfo,
+ ...response.repositoryInfo.reduce((acc, repo) => {
+ acc[repo.id] = repo;
+ return acc;
+ }, {} as Record),
+ },
+ numMatches: prev.numMatches + response.stats.actualMatchCount,
+ ...(isFirstMessage ? {
+ timeToFirstSearchResultMs: performance.now() - startTime,
+ } : {}),
+ }));
+ break;
+ case 'final':
+ setState(prev => ({
+ ...prev,
+ isExhaustive: response.isSearchExhaustive,
+ stats: response.accumulatedStats,
+ ...(isFirstMessage ? {
+ timeToFirstSearchResultMs: performance.now() - startTime,
+ } : {}),
+ }));
+ break;
+ case 'error':
+ throw new ServiceErrorException(response.error);
+ }
+
+ numMessagesProcessed++;
+ }
+ }
+
+ const timeToSearchCompletionMs = performance.now() - startTime;
+ setState(prev => {
+ // Cache the final results after the stream has completed.
+ searchCache.set(cacheKey, {
+ files: prev.files,
+ repoInfo: prev.repoInfo,
+ isExhaustive: prev.isExhaustive,
+ numMatches: prev.numMatches,
+ timeToFirstSearchResultMs: prev.timeToFirstSearchResultMs,
+ timeToSearchCompletionMs,
+ timestamp: Date.now(),
+ });
+ return {
+ ...prev,
+ timeToSearchCompletionMs,
+ isStreaming: false,
+ }
+ });
+
+ } catch (error) {
+ if ((error as Error).name === 'AbortError') {
+ return;
+ }
+
+ console.error(error);
+ Sentry.captureException(error);
+ const timeToSearchCompletionMs = performance.now() - startTime;
+ setState(prev => ({
+ ...prev,
+ isStreaming: false,
+ timeToSearchCompletionMs,
+ error: error instanceof Error ? error : null,
+ }));
+ }
+ }
+
+ search();
+
+ return () => {
+ cancel();
+ }
+ }, [
+ query,
+ matches,
+ contextLines,
+ whole,
+ isRegexEnabled,
+ isCaseSensitivityEnabled,
+ cancel,
+ ]);
+
+ return {
+ ...state,
+ cancel,
+ };
+}
\ No newline at end of file
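For reference, a minimal sketch of how a client component might consume the hook above (the real consumer is `searchResultsPage.tsx` earlier in this diff; the component name `ExampleSearchStatus` and the option values are illustrative):

```typescript
'use client';

import { useStreamedSearch } from './useStreamedSearch';

export const ExampleSearchStatus = ({ query }: { query: string }) => {
    const { files, numMatches, isStreaming, error } = useStreamedSearch({
        query,
        matches: 100,
        contextLines: 3,
        whole: false,
        isRegexEnabled: false,
        isCaseSensitivityEnabled: false,
    });

    if (error) {
        return <p>Search failed: {error.message}</p>;
    }

    return (
        <p>
            {isStreaming
                ? 'Searching…'
                : `Found ${numMatches} matches in ${files.length} files`}
        </p>
    );
};
```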
diff --git a/packages/web/src/app/api/(client)/client.ts b/packages/web/src/app/api/(client)/client.ts
index 6b4b29772..08a01b5b4 100644
--- a/packages/web/src/app/api/(client)/client.ts
+++ b/packages/web/src/app/api/(client)/client.ts
@@ -4,10 +4,12 @@ import { ServiceError } from "@/lib/serviceError";
import { GetVersionResponse, GetReposResponse } from "@/lib/types";
import { isServiceError } from "@/lib/utils";
import {
- FileSourceResponse,
- FileSourceRequest,
SearchRequest,
SearchResponse,
+} from "@/features/search";
+import {
+ FileSourceRequest,
+ FileSourceResponse,
} from "@/features/search/types";
import {
FindRelatedSymbolsRequest,
diff --git a/packages/web/src/app/api/(server)/search/route.ts b/packages/web/src/app/api/(server)/search/route.ts
index 83a5e6a0c..92ba4f2a1 100644
--- a/packages/web/src/app/api/(server)/search/route.ts
+++ b/packages/web/src/app/api/(server)/search/route.ts
@@ -1,10 +1,9 @@
'use server';
-import { search } from "@/features/search/searchApi";
+import { search, searchRequestSchema } from "@/features/search";
import { isServiceError } from "@/lib/utils";
import { NextRequest } from "next/server";
import { schemaValidationError, serviceErrorResponse } from "@/lib/serviceError";
-import { searchRequestSchema } from "@/features/search/schemas";
export const POST = async (request: NextRequest) => {
const body = await request.json();
@@ -14,8 +13,18 @@ export const POST = async (request: NextRequest) => {
schemaValidationError(parsed.error)
);
}
+
+ const {
+ query,
+ ...options
+ } = parsed.data;
- const response = await search(parsed.data);
+ const response = await search({
+ queryType: 'string',
+ query,
+ options,
+ });
+
if (isServiceError(response)) {
return serviceErrorResponse(response);
}
diff --git a/packages/web/src/app/api/(server)/source/route.ts b/packages/web/src/app/api/(server)/source/route.ts
index d64d701d5..2fb785a85 100644
--- a/packages/web/src/app/api/(server)/source/route.ts
+++ b/packages/web/src/app/api/(server)/source/route.ts
@@ -4,7 +4,7 @@ import { getFileSource } from "@/features/search/fileSourceApi";
import { schemaValidationError, serviceErrorResponse } from "@/lib/serviceError";
import { isServiceError } from "@/lib/utils";
import { NextRequest } from "next/server";
-import { fileSourceRequestSchema } from "@/features/search/schemas";
+import { fileSourceRequestSchema } from "@/features/search/types";
export const POST = async (request: NextRequest) => {
const body = await request.json();
diff --git a/packages/web/src/app/api/(server)/stream_search/route.ts b/packages/web/src/app/api/(server)/stream_search/route.ts
new file mode 100644
index 000000000..03057978a
--- /dev/null
+++ b/packages/web/src/app/api/(server)/stream_search/route.ts
@@ -0,0 +1,39 @@
+'use server';
+
+import { streamSearch, searchRequestSchema } from '@/features/search';
+import { schemaValidationError, serviceErrorResponse } from '@/lib/serviceError';
+import { isServiceError } from '@/lib/utils';
+import { NextRequest } from 'next/server';
+
+export const POST = async (request: NextRequest) => {
+ const body = await request.json();
+ const parsed = await searchRequestSchema.safeParseAsync(body);
+
+ if (!parsed.success) {
+ return serviceErrorResponse(schemaValidationError(parsed.error));
+ }
+
+ const {
+ query,
+ ...options
+ } = parsed.data;
+
+ const stream = await streamSearch({
+ queryType: 'string',
+ query,
+ options,
+ });
+
+ if (isServiceError(stream)) {
+ return serviceErrorResponse(stream);
+ }
+
+ return new Response(stream, {
+ headers: {
+ 'Content-Type': 'text/event-stream',
+ 'Cache-Control': 'no-cache, no-transform',
+ 'Connection': 'keep-alive',
+ 'X-Accel-Buffering': 'no', // Disable nginx buffering if applicable
+ },
+ });
+};
diff --git a/packages/web/src/ee/features/codeNav/components/exploreMenu/referenceList.tsx b/packages/web/src/ee/features/codeNav/components/exploreMenu/referenceList.tsx
index e2febdd22..700ff691b 100644
--- a/packages/web/src/ee/features/codeNav/components/exploreMenu/referenceList.tsx
+++ b/packages/web/src/ee/features/codeNav/components/exploreMenu/referenceList.tsx
@@ -4,7 +4,7 @@ import { getBrowsePath } from "@/app/[domain]/browse/hooks/utils";
import { PathHeader } from "@/app/[domain]/components/pathHeader";
import { LightweightCodeHighlighter } from "@/app/[domain]/components/lightweightCodeHighlighter";
import { FindRelatedSymbolsResponse } from "@/features/codeNav/types";
-import { RepositoryInfo, SourceRange } from "@/features/search/types";
+import { RepositoryInfo, SourceRange } from "@/features/search";
import { useMemo, useRef } from "react";
import useCaptureEvent from "@/hooks/useCaptureEvent";
import { useVirtualizer } from "@tanstack/react-virtual";
diff --git a/packages/web/src/ee/features/codeNav/components/symbolHoverPopup/symbolDefinitionPreview.tsx b/packages/web/src/ee/features/codeNav/components/symbolHoverPopup/symbolDefinitionPreview.tsx
index 39e90c66d..a087273c6 100644
--- a/packages/web/src/ee/features/codeNav/components/symbolHoverPopup/symbolDefinitionPreview.tsx
+++ b/packages/web/src/ee/features/codeNav/components/symbolHoverPopup/symbolDefinitionPreview.tsx
@@ -2,7 +2,7 @@ import { Badge } from "@/components/ui/badge";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import { LightweightCodeHighlighter } from "@/app/[domain]/components/lightweightCodeHighlighter";
import { useMemo } from "react";
-import { SourceRange } from "@/features/search/types";
+import { SourceRange } from "@/features/search";
interface SymbolDefinitionPreviewProps {
symbolDefinition: {
diff --git a/packages/web/src/ee/features/codeNav/components/symbolHoverPopup/useHoveredOverSymbolInfo.ts b/packages/web/src/ee/features/codeNav/components/symbolHoverPopup/useHoveredOverSymbolInfo.ts
index 03752820f..b8336a93c 100644
--- a/packages/web/src/ee/features/codeNav/components/symbolHoverPopup/useHoveredOverSymbolInfo.ts
+++ b/packages/web/src/ee/features/codeNav/components/symbolHoverPopup/useHoveredOverSymbolInfo.ts
@@ -1,5 +1,5 @@
import { findSearchBasedSymbolDefinitions } from "@/app/api/(client)/client";
-import { SourceRange } from "@/features/search/types";
+import { SourceRange } from "@/features/search";
import { useDomain } from "@/hooks/useDomain";
import { unwrapServiceError } from "@/lib/utils";
import { useQuery } from "@tanstack/react-query";
diff --git a/packages/web/src/features/agents/review-agent/nodes/fetchFileContent.ts b/packages/web/src/features/agents/review-agent/nodes/fetchFileContent.ts
index d9903b728..b7c38c3c7 100644
--- a/packages/web/src/features/agents/review-agent/nodes/fetchFileContent.ts
+++ b/packages/web/src/features/agents/review-agent/nodes/fetchFileContent.ts
@@ -1,6 +1,6 @@
import { sourcebot_context, sourcebot_pr_payload } from "@/features/agents/review-agent/types";
import { getFileSource } from "@/features/search/fileSourceApi";
-import { fileSourceResponseSchema } from "@/features/search/schemas";
+import { fileSourceResponseSchema } from "@/features/search/types";
import { isServiceError } from "@/lib/utils";
import { createLogger } from "@sourcebot/shared";
diff --git a/packages/web/src/features/chat/tools.ts b/packages/web/src/features/chat/tools.ts
index ab2b2ee61..2d15a8c92 100644
--- a/packages/web/src/features/chat/tools.ts
+++ b/packages/web/src/features/chat/tools.ts
@@ -1,5 +1,5 @@
import { z } from "zod"
-import { search } from "@/features/search/searchApi"
+import { search } from "@/features/search"
import { InferToolInput, InferToolOutput, InferUITool, tool, ToolUIPart } from "ai";
import { isServiceError } from "@/lib/utils";
import { getFileSource } from "../search/fileSourceApi";
@@ -178,12 +178,15 @@ Multiple expressions can be or'd together with or, negated with -, or grouped wi
});
const response = await search({
+ queryType: 'string',
query,
- matches: limit ?? 100,
- // @todo: we can make this configurable.
- contextLines: 3,
- whole: false,
- // @todo(mt): handle multi-tenancy.
+ options: {
+ matches: limit ?? 100,
+ contextLines: 3,
+ whole: false,
+ isCaseSensitivityEnabled: true,
+ isRegexEnabled: true,
+ }
});
if (isServiceError(response)) {
@@ -219,11 +222,11 @@ export const searchReposTool = tool({
}),
execute: async ({ query, limit }) => {
const reposResponse = await getRepos();
-
+
if (isServiceError(reposResponse)) {
return reposResponse;
}
-
+
// Configure Fuse.js for fuzzy searching
const fuse = new Fuse(reposResponse, {
keys: [
@@ -234,7 +237,7 @@ export const searchReposTool = tool({
includeScore: true,
minMatchCharLength: 1,
});
-
+
const searchResults = fuse.search(query, { limit: limit ?? 10 });
searchResults.sort((a, b) => (a.score ?? 0) - (b.score ?? 0));
@@ -253,11 +256,11 @@ export const listAllReposTool = tool({
inputSchema: z.object({}),
execute: async () => {
const reposResponse = await getRepos();
-
+
if (isServiceError(reposResponse)) {
return reposResponse;
}
-
+
return reposResponse.map((repo) => repo.repoName);
}
});
diff --git a/packages/web/src/features/codeNav/api.ts b/packages/web/src/features/codeNav/api.ts
index 1865ee532..d721dbe98 100644
--- a/packages/web/src/features/codeNav/api.ts
+++ b/packages/web/src/features/codeNav/api.ts
@@ -1,13 +1,13 @@
import 'server-only';
import { sew } from "@/actions";
-import { searchResponseSchema } from "@/features/search/schemas";
-import { search } from "@/features/search/searchApi";
+import { search } from "@/features/search";
import { ServiceError } from "@/lib/serviceError";
import { isServiceError } from "@/lib/utils";
import { withOptionalAuthV2 } from "@/withAuthV2";
import { SearchResponse } from "../search/types";
import { FindRelatedSymbolsRequest, FindRelatedSymbolsResponse } from "./types";
+import { QueryIR } from '../search/ir';
// The maximum number of matches to return from the search API.
const MAX_REFERENCE_COUNT = 1000;
@@ -20,12 +20,37 @@ export const findSearchBasedSymbolReferences = async (props: FindRelatedSymbolsR
revisionName = "HEAD",
} = props;
- const query = `\\b${symbolName}\\b rev:${revisionName} ${getExpandedLanguageFilter(language)} case:yes`;
+ const languageFilter = getExpandedLanguageFilter(language);
+
+ const query: QueryIR = {
+ and: {
+ children: [
+ {
+ regexp: {
+ regexp: `\\b${symbolName}\\b`,
+ case_sensitive: true,
+ file_name: false,
+ content: true,
+ }
+ },
+ {
+ branch: {
+ pattern: revisionName,
+ exact: true,
+ }
+ },
+ languageFilter,
+ ]
+ }
+ }
const searchResult = await search({
+ queryType: 'ir',
query,
- matches: MAX_REFERENCE_COUNT,
- contextLines: 0,
+ options: {
+ matches: MAX_REFERENCE_COUNT,
+ contextLines: 0,
+ }
});
if (isServiceError(searchResult)) {
@@ -38,33 +63,62 @@ export const findSearchBasedSymbolReferences = async (props: FindRelatedSymbolsR
export const findSearchBasedSymbolDefinitions = async (props: FindRelatedSymbolsRequest): Promise => sew(() =>
withOptionalAuthV2(async () => {
- const {
- symbolName,
- language,
- revisionName = "HEAD",
- } = props;
+ const {
+ symbolName,
+ language,
+ revisionName = "HEAD",
+ } = props;
- const query = `sym:\\b${symbolName}\\b rev:${revisionName} ${getExpandedLanguageFilter(language)}`;
+ const languageFilter = getExpandedLanguageFilter(language);
+
+ const query: QueryIR = {
+ and: {
+ children: [
+ {
+ symbol: {
+ expr: {
+ regexp: {
+ regexp: `\\b${symbolName}\\b`,
+ case_sensitive: true,
+ file_name: false,
+ content: true,
+ }
+ },
+ }
+ },
+ {
+ branch: {
+ pattern: revisionName,
+ exact: true,
+ }
+ },
+ languageFilter,
+ ]
+ }
+ }
- const searchResult = await search({
- query,
+ const searchResult = await search({
+ queryType: 'ir',
+ query,
+ options: {
matches: MAX_REFERENCE_COUNT,
contextLines: 0,
- });
-
- if (isServiceError(searchResult)) {
- return searchResult;
}
+ });
- return parseRelatedSymbolsSearchResponse(searchResult);
+ if (isServiceError(searchResult)) {
+ return searchResult;
+ }
+
+ return parseRelatedSymbolsSearchResponse(searchResult);
}));
-const parseRelatedSymbolsSearchResponse = (searchResult: SearchResponse) => {
- const parser = searchResponseSchema.transform(async ({ files }) => ({
+const parseRelatedSymbolsSearchResponse = (searchResult: SearchResponse): FindRelatedSymbolsResponse => {
+ return {
stats: {
matchCount: searchResult.stats.actualMatchCount,
},
- files: files.flatMap((file) => {
+ files: searchResult.files.flatMap((file) => {
const chunks = file.chunks;
return {
@@ -82,20 +136,47 @@ const parseRelatedSymbolsSearchResponse = (searchResult: SearchResponse) => {
}
}).filter((file) => file.matches.length > 0),
repositoryInfo: searchResult.repositoryInfo
- }));
-
- return parser.parseAsync(searchResult);
+ };
}
// Expands the language filter to include all variants of the language.
-const getExpandedLanguageFilter = (language: string) => {
+const getExpandedLanguageFilter = (language: string): QueryIR => {
switch (language) {
case "TypeScript":
case "JavaScript":
case "JSX":
case "TSX":
- return `(lang:TypeScript or lang:JavaScript or lang:JSX or lang:TSX)`
+ return {
+ or: {
+ children: [
+ {
+ language: {
+ language: "TypeScript",
+ }
+ },
+ {
+ language: {
+ language: "JavaScript",
+ }
+ },
+ {
+ language: {
+ language: "JSX",
+ }
+ },
+ {
+ language: {
+ language: "TSX",
+ }
+ },
+ ]
+ },
+ }
default:
- return `lang:${language}`
+ return {
+ language: {
+ language: language,
+ },
+ }
}
}
\ No newline at end of file
diff --git a/packages/web/src/features/codeNav/types.ts b/packages/web/src/features/codeNav/types.ts
index 07f3cefd0..b1dace76c 100644
--- a/packages/web/src/features/codeNav/types.ts
+++ b/packages/web/src/features/codeNav/types.ts
@@ -1,5 +1,5 @@
import { z } from "zod";
-import { rangeSchema, repositoryInfoSchema } from "../search/schemas";
+import { rangeSchema, repositoryInfoSchema } from "../search/types";
export const findRelatedSymbolsRequestSchema = z.object({
symbolName: z.string(),
diff --git a/packages/web/src/features/search/README.md b/packages/web/src/features/search/README.md
new file mode 100644
index 000000000..ff34a2e80
--- /dev/null
+++ b/packages/web/src/features/search/README.md
@@ -0,0 +1,31 @@
+# `/search`
+
+Code search interface for Sourcebot.
+
+## Overview
+
+The search feature parses user queries into an intermediate representation (IR), which is then executed against Zoekt's gRPC search backend. Query parsing uses Lezer for syntax analysis.
+
+## Architecture
+
+**Query Flow:**
+1. User query string → Lezer parser (via `@sourcebot/query-language`)
+2. Lezer syntax tree → Query IR (Zoekt gRPC `Q` proto)
+3. Query IR → Zoekt backend → Search results
+
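+For example, a query combining a free-text term with a repository filter might be represented as follows (illustrative only; the node shapes come from the Zoekt gRPC `Q` proto that `ir.ts` re-exports as `QueryIR`):
+
+```typescript
+import { QueryIR } from './ir';
+
+const example: QueryIR = {
+    and: {
+        children: [
+            // The free-text term. With regex mode disabled this becomes a `substring` node instead.
+            { regexp: { regexp: 'foo', case_sensitive: false, file_name: false, content: true } },
+            // A repository filter, matched as a regexp against the repository name.
+            { repo: { regexp: '^sourcebot$' } },
+        ],
+    },
+};
+```
+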
+## Files
+
+- **`index.ts`** - Public API exports for the search feature, including search functions and type definitions.
+
+- **`parser.ts`** - Parses query strings into the query IR using the Lezer parser from `@sourcebot/query-language`.
+
+- **`ir.ts`** - Defines the `QueryIR` type (an alias for the Zoekt gRPC `Q` proto message) and provides type guards and traversal utilities for the IR tree.
+
+- **`types.ts`** - TypeScript types and Zod schemas for search requests, responses, file matches, stats, and streaming results.
+
+- **`searchApi.ts`** - High-level search API that handles authentication, permission filtering, and orchestrates the query parsing and Zoekt backend calls.
+
+- **`zoektSearcher.ts`** - Low-level interface to the Zoekt gRPC backend. Handles request construction, streaming search, response transformation, and repository metadata resolution.
+
+- **`fileSourceApi.ts`** - Retrieves full file contents by executing a specialized search query against Zoekt for a specific file path.
+
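+## Usage
+
+A minimal sketch of calling the public API exported from `index.ts` (this mirrors the server route and code-nav call sites in this change; the query string and option values below are illustrative):
+
+```typescript
+import { search } from '@/features/search';
+import { isServiceError } from '@/lib/utils';
+
+const response = await search({
+    queryType: 'string',
+    query: 'useStreamedSearch repo:sourcebot',
+    options: {
+        matches: 100,
+        contextLines: 3,
+        whole: false,
+    },
+});
+
+if (isServiceError(response)) {
+    // Handle the service error (e.g. surface it to the caller).
+} else {
+    console.log(`Matched ${response.files.length} files`);
+}
+```
+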
diff --git a/packages/web/src/features/search/fileSourceApi.ts b/packages/web/src/features/search/fileSourceApi.ts
index edc346aaa..15cb93c32 100644
--- a/packages/web/src/features/search/fileSourceApi.ts
+++ b/packages/web/src/features/search/fileSourceApi.ts
@@ -1,29 +1,50 @@
import 'server-only';
-import escapeStringRegexp from "escape-string-regexp";
import { fileNotFound, ServiceError, unexpectedError } from "../../lib/serviceError";
import { FileSourceRequest, FileSourceResponse } from "./types";
import { isServiceError } from "../../lib/utils";
import { search } from "./searchApi";
import { sew } from "@/actions";
import { withOptionalAuthV2 } from "@/withAuthV2";
+import { QueryIR } from './ir';
// @todo (bkellam) #574 : We should really be using `git show :` to fetch file contents here.
// This will allow us to support permalinks to files at a specific revision that may not be indexed
-// by zoekt.
+// by zoekt. We should also refactor this out of the /search folder.
export const getFileSource = async ({ fileName, repository, branch }: FileSourceRequest): Promise => sew(() =>
withOptionalAuthV2(async () => {
- const escapedFileName = escapeStringRegexp(fileName);
- const escapedRepository = escapeStringRegexp(repository);
-
- let query = `file:${escapedFileName} repo:^${escapedRepository}$`;
- if (branch) {
- query = query.concat(` branch:${branch}`);
+ const query: QueryIR = {
+ and: {
+ children: [
+ {
+ repo: {
+ regexp: `^${repository}$`,
+ },
+ },
+ {
+ regexp: {
+ regexp: fileName,
+ case_sensitive: true,
+ file_name: true,
+ content: false
+ },
+ },
+ ...(branch ? [{
+ branch: {
+ pattern: branch,
+ exact: true,
+ },
+ }]: [])
+ ]
+ }
}
const searchResponse = await search({
+ queryType: 'ir',
query,
- matches: 1,
- whole: true,
+ options: {
+ matches: 1,
+ whole: true,
+ }
});
if (isServiceError(searchResponse)) {
diff --git a/packages/web/src/features/search/index.ts b/packages/web/src/features/search/index.ts
new file mode 100644
index 000000000..665e070eb
--- /dev/null
+++ b/packages/web/src/features/search/index.ts
@@ -0,0 +1,15 @@
+export { search, streamSearch } from './searchApi';
+export {
+ searchRequestSchema,
+} from './types';
+export type {
+ SourceRange,
+ SearchSymbol,
+ RepositoryInfo,
+ SearchRequest,
+ SearchResultFile,
+ SearchStats,
+ StreamedSearchResponse,
+ SearchResultChunk,
+ SearchResponse,
+} from './types';
\ No newline at end of file
diff --git a/packages/web/src/features/search/ir.ts b/packages/web/src/features/search/ir.ts
new file mode 100644
index 000000000..11c3cb41a
--- /dev/null
+++ b/packages/web/src/features/search/ir.ts
@@ -0,0 +1,209 @@
+import { Q as QueryIR } from '@/proto/zoekt/webserver/v1/Q';
+import { RawConfig } from '@/proto/zoekt/webserver/v1/RawConfig';
+import { Regexp } from '@/proto/zoekt/webserver/v1/Regexp';
+import { Symbol } from '@/proto/zoekt/webserver/v1/Symbol';
+import { Language } from '@/proto/zoekt/webserver/v1/Language';
+import { Repo } from '@/proto/zoekt/webserver/v1/Repo';
+import { RepoRegexp } from '@/proto/zoekt/webserver/v1/RepoRegexp';
+import { BranchesRepos } from '@/proto/zoekt/webserver/v1/BranchesRepos';
+import { RepoIds } from '@/proto/zoekt/webserver/v1/RepoIds';
+import { RepoSet } from '@/proto/zoekt/webserver/v1/RepoSet';
+import { FileNameSet } from '@/proto/zoekt/webserver/v1/FileNameSet';
+import { Type } from '@/proto/zoekt/webserver/v1/Type';
+import { Substring } from '@/proto/zoekt/webserver/v1/Substring';
+import { And } from '@/proto/zoekt/webserver/v1/And';
+import { Or } from '@/proto/zoekt/webserver/v1/Or';
+import { Not } from '@/proto/zoekt/webserver/v1/Not';
+import { Branch } from '@/proto/zoekt/webserver/v1/Branch';
+import { Boost } from '@/proto/zoekt/webserver/v1/Boost';
+
+export type {
+ QueryIR,
+}
+
+// Type guards for each query node type
+export const isRawConfigQuery = (query: QueryIR): query is QueryIR & { raw_config: RawConfig } => query.raw_config != null;
+export const isRegexpQuery = (query: QueryIR): query is QueryIR & { regexp: Regexp } => query.regexp != null;
+export const isSymbolQuery = (query: QueryIR): query is QueryIR & { symbol: Symbol } => query.symbol != null;
+export const isLanguageQuery = (query: QueryIR): query is QueryIR & { language: Language } => query.language != null;
+export const isConstQuery = (query: QueryIR): query is QueryIR & { const: boolean } => query.const != null;
+export const isRepoQuery = (query: QueryIR): query is QueryIR & { repo: Repo } => query.repo != null;
+export const isRepoRegexpQuery = (query: QueryIR): query is QueryIR & { repo_regexp: RepoRegexp } => query.repo_regexp != null;
+export const isBranchesReposQuery = (query: QueryIR): query is QueryIR & { branches_repos: BranchesRepos } => query.branches_repos != null;
+export const isRepoIdsQuery = (query: QueryIR): query is QueryIR & { repo_ids: RepoIds } => query.repo_ids != null;
+export const isRepoSetQuery = (query: QueryIR): query is QueryIR & { repo_set: RepoSet } => query.repo_set != null;
+export const isFileNameSetQuery = (query: QueryIR): query is QueryIR & { file_name_set: FileNameSet } => query.file_name_set != null;
+export const isTypeQuery = (query: QueryIR): query is QueryIR & { type: Type } => query.type != null;
+export const isSubstringQuery = (query: QueryIR): query is QueryIR & { substring: Substring } => query.substring != null;
+export const isAndQuery = (query: QueryIR): query is QueryIR & { and: And } => query.and != null;
+export const isOrQuery = (query: QueryIR): query is QueryIR & { or: Or } => query.or != null;
+export const isNotQuery = (query: QueryIR): query is QueryIR & { not: Not } => query.not != null;
+export const isBranchQuery = (query: QueryIR): query is QueryIR & { branch: Branch } => query.branch != null;
+export const isBoostQuery = (query: QueryIR): query is QueryIR & { boost: Boost } => query.boost != null;
+
+/**
+ * Visitor pattern for traversing a QueryIR tree.
+ * Return false from any method to stop traversal early.
+ */
+export type QueryVisitor = {
+ onRawConfig?: (query: QueryIR) => boolean | void;
+ onRegexp?: (query: QueryIR) => boolean | void;
+ onSymbol?: (query: QueryIR) => boolean | void;
+ onLanguage?: (query: QueryIR) => boolean | void;
+ onConst?: (query: QueryIR) => boolean | void;
+ onRepo?: (query: QueryIR) => boolean | void;
+ onRepoRegexp?: (query: QueryIR) => boolean | void;
+ onBranchesRepos?: (query: QueryIR) => boolean | void;
+ onRepoIds?: (query: QueryIR) => boolean | void;
+ onRepoSet?: (query: QueryIR) => boolean | void;
+ onFileNameSet?: (query: QueryIR) => boolean | void;
+ onType?: (query: QueryIR) => boolean | void;
+ onSubstring?: (query: QueryIR) => boolean | void;
+ onAnd?: (query: QueryIR) => boolean | void;
+ onOr?: (query: QueryIR) => boolean | void;
+ onNot?: (query: QueryIR) => boolean | void;
+ onBranch?: (query: QueryIR) => boolean | void;
+ onBoost?: (query: QueryIR) => boolean | void;
+};
+
+/**
+ * Traverses a QueryIR tree using the visitor pattern.
+ * @param query The query to traverse
+ * @param visitor An object with optional callback methods for each query type
+ * @returns false if traversal was stopped early, true otherwise
+ */
+export function traverseQueryIR(
+ query: QueryIR,
+ visitor: QueryVisitor
+): boolean {
+ let shouldContinue: boolean | void = true;
+
+ if (isRawConfigQuery(query)) {
+ shouldContinue = visitor.onRawConfig?.(query);
+
+ } else if (isRegexpQuery(query)) {
+ shouldContinue = visitor.onRegexp?.(query);
+
+ } else if (isSymbolQuery(query)) {
+ shouldContinue = visitor.onSymbol?.(query);
+ if (shouldContinue !== false && query.symbol.expr) {
+ shouldContinue = traverseQueryIR(query.symbol.expr, visitor);
+ }
+
+ } else if (isLanguageQuery(query)) {
+ shouldContinue = visitor.onLanguage?.(query);
+
+ } else if (isConstQuery(query)) {
+ shouldContinue = visitor.onConst?.(query);
+
+ } else if (isRepoQuery(query)) {
+ shouldContinue = visitor.onRepo?.(query);
+
+ } else if (isRepoRegexpQuery(query)) {
+ shouldContinue = visitor.onRepoRegexp?.(query);
+
+ } else if (isBranchesReposQuery(query)) {
+ shouldContinue = visitor.onBranchesRepos?.(query);
+
+ } else if (isRepoIdsQuery(query)) {
+ shouldContinue = visitor.onRepoIds?.(query);
+
+ } else if (isRepoSetQuery(query)) {
+ shouldContinue = visitor.onRepoSet?.(query);
+
+ } else if (isFileNameSetQuery(query)) {
+ shouldContinue = visitor.onFileNameSet?.(query);
+
+ } else if (isTypeQuery(query)) {
+ shouldContinue = visitor.onType?.(query);
+
+ } else if (isSubstringQuery(query)) {
+ shouldContinue = visitor.onSubstring?.(query);
+
+ } else if (isAndQuery(query)) {
+ shouldContinue = visitor.onAnd?.(query);
+ if (shouldContinue !== false && query.and.children) {
+ for (const child of query.and.children) {
+ if (!traverseQueryIR(child, visitor)) {
+ return false;
+ }
+ }
+ }
+
+ } else if (isOrQuery(query)) {
+ shouldContinue = visitor.onOr?.(query);
+ if (shouldContinue !== false && query.or.children) {
+ for (const child of query.or.children) {
+ if (!traverseQueryIR(child, visitor)) {
+ return false;
+ }
+ }
+ }
+
+ } else if (isNotQuery(query)) {
+ shouldContinue = visitor.onNot?.(query);
+ if (shouldContinue !== false && query.not.child) {
+ shouldContinue = traverseQueryIR(query.not.child, visitor);
+ }
+
+ } else if (isBranchQuery(query)) {
+ shouldContinue = visitor.onBranch?.(query);
+
+ } else if (isBoostQuery(query)) {
+ shouldContinue = visitor.onBoost?.(query);
+ if (shouldContinue !== false && query.boost.child) {
+ shouldContinue = traverseQueryIR(query.boost.child, visitor);
+ }
+ }
+
+ return shouldContinue !== false;
+}
+
+/**
+ * Finds a node in the query tree that matches the predicate.
+ * @param query The query to search
+ * @param predicate A function that returns true if the node matches
+ * @returns The first matching query node, or undefined if none found
+ */
+export function findInQueryIR(
+ query: QueryIR,
+ predicate: (query: QueryIR) => boolean
+): QueryIR | undefined {
+ let found: QueryIR | undefined;
+
+ traverseQueryIR(query, {
+ onRawConfig: (q) => { if (predicate(q)) { found = q; return false; } },
+ onRegexp: (q) => { if (predicate(q)) { found = q; return false; } },
+ onSymbol: (q) => { if (predicate(q)) { found = q; return false; } },
+ onLanguage: (q) => { if (predicate(q)) { found = q; return false; } },
+ onConst: (q) => { if (predicate(q)) { found = q; return false; } },
+ onRepo: (q) => { if (predicate(q)) { found = q; return false; } },
+ onRepoRegexp: (q) => { if (predicate(q)) { found = q; return false; } },
+ onBranchesRepos: (q) => { if (predicate(q)) { found = q; return false; } },
+ onRepoIds: (q) => { if (predicate(q)) { found = q; return false; } },
+ onRepoSet: (q) => { if (predicate(q)) { found = q; return false; } },
+ onFileNameSet: (q) => { if (predicate(q)) { found = q; return false; } },
+ onType: (q) => { if (predicate(q)) { found = q; return false; } },
+ onSubstring: (q) => { if (predicate(q)) { found = q; return false; } },
+ onAnd: (q) => { if (predicate(q)) { found = q; return false; } },
+ onOr: (q) => { if (predicate(q)) { found = q; return false; } },
+ onNot: (q) => { if (predicate(q)) { found = q; return false; } },
+ onBranch: (q) => { if (predicate(q)) { found = q; return false; } },
+ onBoost: (q) => { if (predicate(q)) { found = q; return false; } },
+ });
+
+ return found;
+}
+
+/**
+ * Checks if any node in the query tree matches the predicate.
+ * @param query The query to search
+ * @param predicate A function that returns true if the node matches
+ * @returns true if any node matches, false otherwise
+ */
+export function someInQueryIR(
+ query: QueryIR,
+ predicate: (query: QueryIR) => boolean
+): boolean {
+ return findInQueryIR(query, predicate) !== undefined;
+}
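A short sketch of how the guards and traversal helpers above can be used, e.g. to check whether a parsed query already carries a branch filter, or to collect every language it mentions (illustrative; only functions defined in this file are used):

```typescript
import { QueryIR, isBranchQuery, isLanguageQuery, someInQueryIR, traverseQueryIR } from './ir';

// Does the query constrain the branch anywhere in its tree?
const hasBranchFilter = (query: QueryIR): boolean =>
    someInQueryIR(query, isBranchQuery);

// Collect every language mentioned anywhere in the query tree.
const collectLanguages = (query: QueryIR): string[] => {
    const languages: string[] = [];
    traverseQueryIR(query, {
        onLanguage: (q) => {
            if (isLanguageQuery(q) && q.language.language) {
                languages.push(q.language.language);
            }
        },
    });
    return languages;
};
```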
diff --git a/packages/web/src/features/search/parser.ts b/packages/web/src/features/search/parser.ts
new file mode 100644
index 000000000..21cf74732
--- /dev/null
+++ b/packages/web/src/features/search/parser.ts
@@ -0,0 +1,406 @@
+import { QueryIR } from './ir';
+import {
+ AndExpr,
+ ArchivedExpr,
+ ContentExpr,
+ ContextExpr,
+ FileExpr,
+ ForkExpr,
+ LangExpr,
+ NegateExpr,
+ OrExpr,
+ ParenExpr,
+ PrefixExpr,
+ Program,
+ RepoExpr,
+ RepoSetExpr,
+ RevisionExpr,
+ SymExpr,
+ SyntaxNode,
+ Term,
+ Tree,
+ VisibilityExpr,
+} from '@sourcebot/query-language';
+import { parser as _parser } from '@sourcebot/query-language';
+import { PrismaClient } from '@sourcebot/db';
+import { SINGLE_TENANT_ORG_ID } from '@/lib/constants';
+import { ServiceErrorException } from '@/lib/serviceError';
+import { StatusCodes } from 'http-status-codes';
+import { ErrorCode } from '@/lib/errorCodes';
+
+// Configure the parser to throw errors when encountering invalid syntax.
+const parser = _parser.configure({
+ strict: true,
+});
+
+type ArchivedValue = 'yes' | 'no' | 'only';
+type VisibilityValue = 'public' | 'private' | 'any';
+type ForkValue = 'yes' | 'no' | 'only';
+
+const isArchivedValue = (value: string): value is ArchivedValue => {
+ return value === 'yes' || value === 'no' || value === 'only';
+}
+
+const isVisibilityValue = (value: string): value is VisibilityValue => {
+ return value === 'public' || value === 'private' || value === 'any';
+}
+
+const isForkValue = (value: string): value is ForkValue => {
+ return value === 'yes' || value === 'no' || value === 'only';
+}
+
+/**
+ * Given a query string, parses it into the query intermediate representation.
+ */
+export const parseQuerySyntaxIntoIR = async ({
+ query,
+ options,
+ prisma,
+}: {
+ query: string,
+ options: {
+ isCaseSensitivityEnabled?: boolean;
+ isRegexEnabled?: boolean;
+ },
+ prisma: PrismaClient,
+}): Promise<QueryIR> => {
+
+ try {
+ // First parse the query into a Lezer tree.
+ const tree = parser.parse(query);
+
+ // Then transform the tree into the intermediate representation.
+ return transformTreeToIR({
+ tree,
+ input: query,
+ isCaseSensitivityEnabled: options.isCaseSensitivityEnabled ?? false,
+ isRegexEnabled: options.isRegexEnabled ?? false,
+ onExpandSearchContext: async (contextName: string) => {
+ const context = await prisma.searchContext.findUnique({
+ where: {
+ name_orgId: {
+ name: contextName,
+ orgId: SINGLE_TENANT_ORG_ID,
+ }
+ },
+ include: {
+ repos: true,
+ }
+ });
+
+ if (!context) {
+ throw new Error(`Search context "${contextName}" not found`);
+ }
+
+ return context.repos.map((repo) => repo.name);
+ },
+ });
+ } catch (error) {
+ if (error instanceof SyntaxError) {
+ throw new ServiceErrorException({
+ statusCode: StatusCodes.BAD_REQUEST,
+ errorCode: ErrorCode.FAILED_TO_PARSE_QUERY,
+ message: `Failed to parse query "${query}" with message: ${error.message}`,
+ });
+ }
+ throw error;
+ }
+}
+
+/**
+ * Given a Lezer tree, transforms it into the query intermediate representation.
+ */
+const transformTreeToIR = async ({
+ tree,
+ input,
+ isCaseSensitivityEnabled,
+ isRegexEnabled,
+ onExpandSearchContext,
+}: {
+ tree: Tree;
+ input: string;
+ isCaseSensitivityEnabled: boolean;
+ isRegexEnabled: boolean;
+ onExpandSearchContext: (contextName: string) => Promise<string[]>;
+}): Promise<QueryIR> => {
+ const transformNode = async (node: SyntaxNode): Promise<QueryIR> => {
+ switch (node.type.id) {
+ case Program: {
+ // Program wraps the actual query - transform its child
+ const child = node.firstChild;
+ if (!child) {
+ // Empty query - match nothing
+ return { const: false, query: "const" };
+ }
+ return transformNode(child);
+ }
+ case AndExpr:
+ return {
+ and: {
+ children: await Promise.all(getChildren(node).map(c => transformNode(c)))
+ },
+ query: "and"
+ }
+
+ case OrExpr:
+ return {
+ or: {
+ children: await Promise.all(getChildren(node).map(c => transformNode(c)))
+ },
+ query: "or"
+ };
+
+ case NegateExpr: {
+ // Find the child after the negate token
+ const negateChild = node.getChild("PrefixExpr") || node.getChild("ParenExpr");
+ if (!negateChild) {
+ throw new Error("NegateExpr missing child");
+ }
+ return {
+ not: {
+ child: await transformNode(negateChild)
+ },
+ query: "not"
+ };
+ }
+ case ParenExpr: {
+ // Parentheses just group - transform the inner query
+ const innerQuery = node.getChild("query") || node.firstChild;
+ if (!innerQuery) {
+ return { const: false, query: "const" };
+ }
+ return transformNode(innerQuery);
+ }
+ case PrefixExpr:
+ // PrefixExpr contains specific prefix types
+ return transformPrefixExpr(node);
+
+ case Term: {
+ const termText = input.substring(node.from, node.to).replace(/^"|"$/g, '');
+
+ return isRegexEnabled ? {
+ regexp: {
+ regexp: termText,
+ case_sensitive: isCaseSensitivityEnabled,
+ file_name: false,
+ content: true
+ },
+ query: "regexp"
+ } : {
+ substring: {
+ pattern: termText,
+ case_sensitive: isCaseSensitivityEnabled,
+ file_name: false,
+ content: true
+ },
+ query: "substring"
+ };
+ }
+ default:
+ console.warn(`Unhandled node type: ${node.type.name} (id: ${node.type.id})`);
+ return { const: true, query: "const" };
+ }
+ }
+
+ const transformPrefixExpr = async (node: SyntaxNode): Promise<QueryIR> => {
+ // Find which specific prefix type this is
+ const prefixNode = node.firstChild;
+ if (!prefixNode) {
+ throw new Error("PrefixExpr has no child");
+ }
+
+ const prefixTypeId = prefixNode.type.id;
+
+ // Extract the full text (e.g., "file:test.js") and split on the colon
+ const fullText = input.substring(prefixNode.from, prefixNode.to);
+ const colonIndex = fullText.indexOf(':');
+ if (colonIndex === -1) {
+ throw new Error(`${prefixNode.type.name} missing colon`);
+ }
+
+ // Get the value part after the colon and remove quotes if present
+ const value = fullText.substring(colonIndex + 1).replace(/^"|"$/g, '');
+
+ switch (prefixTypeId) {
+ case FileExpr:
+ return {
+ regexp: {
+ regexp: value,
+ case_sensitive: isCaseSensitivityEnabled,
+ file_name: true,
+ content: false
+ },
+ query: "regexp"
+ };
+
+ case RepoExpr:
+ return {
+ repo: {
+ regexp: value
+ },
+ query: "repo"
+ };
+
+ case RevisionExpr:
+ return {
+ branch: {
+ // Special case - "*" means search all branches. Passing in a
+ // blank string will match all branches.
+ pattern: value === '*' ? "" : value,
+ exact: false
+ },
+ query: "branch"
+ };
+
+ case ContentExpr:
+ return {
+ substring: {
+ pattern: value,
+ case_sensitive: isCaseSensitivityEnabled,
+ file_name: false,
+ content: true
+ },
+ query: "substring"
+ };
+
+
+ case LangExpr:
+ return {
+ language: {
+ language: value
+ },
+ query: "language"
+ };
+
+ case SymExpr:
+ // Symbol search wraps a pattern
+ return {
+ symbol: {
+ expr: {
+ regexp: {
+ regexp: value,
+ case_sensitive: isCaseSensitivityEnabled,
+ file_name: false,
+ content: true
+ },
+ query: "regexp"
+ }
+ },
+ query: "symbol"
+ };
+
+ case VisibilityExpr: {
+ const rawValue = value.toLowerCase();
+
+ if (!isVisibilityValue(rawValue)) {
+ throw new Error(`Invalid visibility value: ${rawValue}. Expected 'public', 'private', or 'any'`);
+ }
+
+ const flags: ('FLAG_ONLY_PUBLIC' | 'FLAG_ONLY_PRIVATE')[] = [];
+
+ if (rawValue === 'any') {
+ // 'any' means no filter
+ } else if (rawValue === 'public') {
+ flags.push('FLAG_ONLY_PUBLIC');
+ } else if (rawValue === 'private') {
+ flags.push('FLAG_ONLY_PRIVATE');
+ }
+
+ return {
+ raw_config: {
+ flags
+ },
+ query: "raw_config"
+ };
+ }
+
+ case ArchivedExpr: {
+ const rawValue = value.toLowerCase();
+
+ if (!isArchivedValue(rawValue)) {
+ throw new Error(`Invalid archived value: ${rawValue}. Expected 'yes', 'no', or 'only'`);
+ }
+
+ const flags: ('FLAG_ONLY_ARCHIVED' | 'FLAG_NO_ARCHIVED')[] = [];
+
+ if (rawValue === 'yes') {
+ // 'yes' means include archived repositories (default)
+ } else if (rawValue === 'no') {
+ flags.push('FLAG_NO_ARCHIVED');
+ } else if (rawValue === 'only') {
+ flags.push('FLAG_ONLY_ARCHIVED');
+ }
+
+ return {
+ raw_config: {
+ flags
+ },
+ query: "raw_config"
+ };
+ }
+ case ForkExpr: {
+ const rawValue = value.toLowerCase();
+
+ if (!isForkValue(rawValue)) {
+ throw new Error(`Invalid fork value: ${rawValue}. Expected 'yes', 'no', or 'only'`);
+ }
+
+ const flags: ('FLAG_ONLY_FORKS' | 'FLAG_NO_FORKS')[] = [];
+
+ if (rawValue === 'yes') {
+ // 'yes' means include forks (default)
+ } else if (rawValue === 'no') {
+ flags.push('FLAG_NO_FORKS');
+ } else if (rawValue === 'only') {
+ flags.push('FLAG_ONLY_FORKS');
+ }
+
+ return {
+ raw_config: {
+ flags
+ },
+ query: "raw_config"
+ };
+ }
+
+ case ContextExpr: {
+ const repoNames = await onExpandSearchContext(value);
+ return {
+ repo_set: {
+ set: repoNames.reduce((acc, s) => {
+ acc[s.trim()] = true;
+ return acc;
+ }, {} as Record<string, boolean>)
+ },
+ query: "repo_set"
+ };
+ }
+
+ case RepoSetExpr: {
+ return {
+ repo_set: {
+ set: value.split(',').reduce((acc, s) => {
+ acc[s.trim()] = true;
+ return acc;
+ }, {} as Record<string, boolean>)
+ },
+ query: "repo_set"
+ };
+ }
+ default:
+ throw new Error(`Unknown prefix type: ${prefixNode.type.name} (id: ${prefixTypeId})`);
+ }
+ }
+
+ return transformNode(tree.topNode);
+}
+
+const getChildren = (node: SyntaxNode): SyntaxNode[] => {
+ const children: SyntaxNode[] = [];
+ let child = node.firstChild;
+ while (child) {
+ children.push(child);
+ child = child.nextSibling;
+ }
+ return children;
+}
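
As a rough sketch of how this parser is exercised (assuming a PrismaClient instance in scope; the query string and the resulting shape are illustrative, not exhaustive):

    import { parseQuerySyntaxIntoIR } from './parser';

    const ir = await parseQuerySyntaxIntoIR({
        query: 'repo:linux lang:c hello',
        options: { isRegexEnabled: false, isCaseSensitivityEnabled: false },
        prisma,
    });
    // Expected shape: an "and" node whose children are a repo atom ({ repo: { regexp: 'linux' } }),
    // a language atom ({ language: { language: 'c' } }), and a substring atom for "hello".
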
diff --git a/packages/web/src/features/search/schemas.ts b/packages/web/src/features/search/schemas.ts
deleted file mode 100644
index 711c810d9..000000000
--- a/packages/web/src/features/search/schemas.ts
+++ /dev/null
@@ -1,163 +0,0 @@
-// @NOTE : Please keep this file in sync with @sourcebot/mcp/src/schemas.ts
-import { CodeHostType } from "@sourcebot/db";
-import { z } from "zod";
-
-export const locationSchema = z.object({
- // 0-based byte offset from the beginning of the file
- byteOffset: z.number(),
- // 1-based line number from the beginning of the file
- lineNumber: z.number(),
- // 1-based column number (in runes) from the beginning of line
- column: z.number(),
-});
-
-export const rangeSchema = z.object({
- start: locationSchema,
- end: locationSchema,
-});
-
-export const symbolSchema = z.object({
- symbol: z.string(),
- kind: z.string(),
-});
-
-export const searchRequestSchema = z.object({
- // The zoekt query to execute.
- query: z.string(),
- // The number of matches to return.
- matches: z.number(),
- // The number of context lines to return.
- contextLines: z.number().optional(),
- // Whether to return the whole file as part of the response.
- whole: z.boolean().optional(),
-});
-
-export const repositoryInfoSchema = z.object({
- id: z.number(),
- codeHostType: z.nativeEnum(CodeHostType),
- name: z.string(),
- displayName: z.string().optional(),
- webUrl: z.string().optional(),
-});
-
-// Many of these fields are defined in zoekt/api.go.
-export const searchStatsSchema = z.object({
- // The actual number of matches returned by the search.
- // This will always be less than or equal to `totalMatchCount`.
- actualMatchCount: z.number(),
-
- // The total number of matches found during the search.
- totalMatchCount: z.number(),
-
- // The duration (in nanoseconds) of the search.
- duration: z.number(),
-
- // Number of files containing a match.
- fileCount: z.number(),
-
- // Candidate files whose contents weren't examined because we
- // gathered enough matches.
- filesSkipped: z.number(),
-
- // Amount of I/O for reading contents.
- contentBytesLoaded: z.number(),
-
- // Amount of I/O for reading from index.
- indexBytesLoaded: z.number(),
-
- // Number of search shards that had a crash.
- crashes: z.number(),
-
- // Number of files in shards that we considered.
- shardFilesConsidered: z.number(),
-
- // Files that we evaluated. Equivalent to files for which all
- // atom matches (including negations) evaluated to true.
- filesConsidered: z.number(),
-
- // Files for which we loaded file content to verify substring matches
- filesLoaded: z.number(),
-
- // Shards that we scanned to find matches.
- shardsScanned: z.number(),
-
- // Shards that we did not process because a query was canceled.
- shardsSkipped: z.number(),
-
- // Shards that we did not process because the query was rejected by the
- // ngram filter indicating it had no matches.
- shardsSkippedFilter: z.number(),
-
- // Number of candidate matches as a result of searching ngrams.
- ngramMatches: z.number(),
-
- // NgramLookups is the number of times we accessed an ngram in the index.
- ngramLookups: z.number(),
-
- // Wall clock time for queued search.
- wait: z.number(),
-
- // Aggregate wall clock time spent constructing and pruning the match tree.
- // This accounts for time such as lookups in the trigram index.
- matchTreeConstruction: z.number(),
-
- // Aggregate wall clock time spent searching the match tree. This accounts
- // for the bulk of search work done looking for matches.
- matchTreeSearch: z.number(),
-
- // Number of times regexp was called on files that we evaluated.
- regexpsConsidered: z.number(),
-
- // FlushReason explains why results were flushed.
- flushReason: z.number(),
-});
-
-export const searchResponseSchema = z.object({
- stats: searchStatsSchema,
- files: z.array(z.object({
- fileName: z.object({
- // The name of the file
- text: z.string(),
- // Any matching ranges
- matchRanges: z.array(rangeSchema),
- }),
- webUrl: z.string().optional(),
- repository: z.string(),
- repositoryId: z.number(),
- language: z.string(),
- chunks: z.array(z.object({
- content: z.string(),
- matchRanges: z.array(rangeSchema),
- contentStart: locationSchema,
- symbols: z.array(z.object({
- ...symbolSchema.shape,
- parent: symbolSchema.optional(),
- })).optional(),
- })),
- branches: z.array(z.string()).optional(),
- // Set if `whole` is true.
- content: z.string().optional(),
- })),
- repositoryInfo: z.array(repositoryInfoSchema),
- isBranchFilteringEnabled: z.boolean(),
- isSearchExhaustive: z.boolean(),
- __debug_timings: z.record(z.string(), z.number()).optional(),
-});
-
-export const fileSourceRequestSchema = z.object({
- fileName: z.string(),
- repository: z.string(),
- branch: z.string().optional(),
-});
-
-export const fileSourceResponseSchema = z.object({
- source: z.string(),
- language: z.string(),
- path: z.string(),
- repository: z.string(),
- repositoryCodeHostType: z.nativeEnum(CodeHostType),
- repositoryDisplayName: z.string().optional(),
- repositoryWebUrl: z.string().optional(),
- branch: z.string().optional(),
- webUrl: z.string().optional(),
-});
\ No newline at end of file
diff --git a/packages/web/src/features/search/searchApi.ts b/packages/web/src/features/search/searchApi.ts
index 1ca57ef46..cf362c861 100644
--- a/packages/web/src/features/search/searchApi.ts
+++ b/packages/web/src/features/search/searchApi.ts
@@ -1,429 +1,87 @@
-import 'server-only';
import { sew } from "@/actions";
+import { getRepoPermissionFilterForUser } from "@/prisma";
import { withOptionalAuthV2 } from "@/withAuthV2";
-import { PrismaClient, Repo } from "@sourcebot/db";
-import { base64Decode, createLogger } from "@sourcebot/shared";
-import { StatusCodes } from "http-status-codes";
-import { ErrorCode } from "../../lib/errorCodes";
-import { invalidZoektResponse, ServiceError } from "../../lib/serviceError";
-import { isServiceError, measure } from "../../lib/utils";
-import { SearchRequest, SearchResponse, SourceRange } from "./types";
-import { zoektFetch } from "./zoektClient";
-import { ZoektSearchResponse } from "./zoektSchema";
+import { PrismaClient, UserWithAccounts } from "@sourcebot/db";
+import { createLogger, env, hasEntitlement } from "@sourcebot/shared";
+import { QueryIR } from './ir';
+import { parseQuerySyntaxIntoIR } from './parser';
+import { SearchOptions } from "./types";
+import { createZoektSearchRequest, zoektSearch, zoektStreamSearch } from './zoektSearcher';
const logger = createLogger("searchApi");
-// List of supported query prefixes in zoekt.
-// @see : https://github.com/sourcebot-dev/zoekt/blob/main/query/parse.go#L417
-enum zoektPrefixes {
- archived = "archived:",
- branchShort = "b:",
- branch = "branch:",
- caseShort = "c:",
- case = "case:",
- content = "content:",
- fileShort = "f:",
- file = "file:",
- fork = "fork:",
- public = "public:",
- repoShort = "r:",
- repo = "repo:",
- regex = "regex:",
- lang = "lang:",
- sym = "sym:",
- typeShort = "t:",
- type = "type:",
- reposet = "reposet:",
+type QueryStringSearchRequest = {
+ queryType: 'string';
+ query: string;
+ options: SearchOptions;
}
-const transformZoektQuery = async (query: string, orgId: number, prisma: PrismaClient): Promise<string | ServiceError> => {
- const prevQueryParts = query.split(" ");
- const newQueryParts = [];
-
- for (const part of prevQueryParts) {
-
- // Handle mapping `rev:` and `revision:` to `branch:`
- if (part.match(/^-?(rev|revision):.+$/)) {
- const isNegated = part.startsWith("-");
- let revisionName = part.slice(part.indexOf(":") + 1);
-
- // Special case: `*` -> search all revisions.
- // In zoekt, providing a blank string will match all branches.
- // @see: https://github.com/sourcebot-dev/zoekt/blob/main/eval.go#L560-L562
- if (revisionName === "*") {
- revisionName = "";
- }
- newQueryParts.push(`${isNegated ? "-" : ""}${zoektPrefixes.branch}${revisionName}`);
- }
-
- // Expand `context:` into `reposet:` atom.
- else if (part.match(/^-?context:.+$/)) {
- const isNegated = part.startsWith("-");
- const contextName = part.slice(part.indexOf(":") + 1);
-
- const context = await prisma.searchContext.findUnique({
- where: {
- name_orgId: {
- name: contextName,
- orgId,
- }
- },
- include: {
- repos: true,
- }
- });
-
- // If the context doesn't exist, return an error.
- if (!context) {
- return {
- errorCode: ErrorCode.SEARCH_CONTEXT_NOT_FOUND,
- message: `Search context "${contextName}" not found`,
- statusCode: StatusCodes.NOT_FOUND,
- } satisfies ServiceError;
- }
-
- const names = context.repos.map((repo) => repo.name);
- newQueryParts.push(`${isNegated ? "-" : ""}${zoektPrefixes.reposet}${names.join(",")}`);
- }
-
- // no-op: add the original part to the new query parts.
- else {
- newQueryParts.push(part);
- }
- }
-
- return newQueryParts.join(" ");
+type QueryIRSearchRequest = {
+ queryType: 'ir';
+ query: QueryIR;
+ // Omit options that are specific to query syntax parsing.
+ options: Omit<SearchOptions, 'isRegexEnabled' | 'isCaseSensitivityEnabled'>;
}
-// Extracts a repository file URL from a zoekt template, branch, and file name.
-const getFileWebUrl = (template: string, branch: string, fileName: string): string | undefined => {
- // This is a hacky parser for templates generated by
- // the go text/template package. Example template:
- // {{URLJoinPath "https://github.com/sourcebot-dev/sourcebot" "blob" .Version .Path}}
+type SearchRequest = QueryStringSearchRequest | QueryIRSearchRequest;
- if (!template.match(/^{{URLJoinPath\s.*}}(\?.+)?$/)) {
- return undefined;
- }
-
- const url =
- template.substring("{{URLJoinPath ".length, template.indexOf("}}"))
- .split(" ")
- .map((part) => {
- // remove wrapping quotes
- if (part.startsWith("\"")) part = part.substring(1);
- if (part.endsWith("\"")) part = part.substring(0, part.length - 1);
- // Replace variable references
- if (part == ".Version") part = branch;
- if (part == ".Path") part = fileName;
- return part;
- })
- .join("/");
-
- const optionalQueryParams =
- template.substring(template.indexOf("}}") + 2)
- .replace("{{.Version}}", branch)
- .replace("{{.Path}}", fileName);
-
- return encodeURI(url + optionalQueryParams);
-}
+export const search = (request: SearchRequest) => sew(() =>
+ withOptionalAuthV2(async ({ prisma, user }) => {
+ const repoSearchScope = await getAccessibleRepoNamesForUser({ user, prisma });
-export const search = async ({ query, matches, contextLines, whole }: SearchRequest): Promise<SearchResponse | ServiceError> => sew(() =>
- withOptionalAuthV2(async ({ org, prisma }) => {
- const transformedQuery = await transformZoektQuery(query, org.id, prisma);
- if (isServiceError(transformedQuery)) {
- return transformedQuery;
- }
- query = transformedQuery;
-
- const isBranchFilteringEnabled = (
- query.includes(zoektPrefixes.branch) ||
- query.includes(zoektPrefixes.branchShort)
- );
-
- // We only want to show matches for the default branch when
- // the user isn't explicitly filtering by branch.
- if (!isBranchFilteringEnabled) {
- query = query.concat(` branch:HEAD`);
- }
+ // If needed, parse the query syntax into the query intermediate representation.
+ const query = request.queryType === 'string' ? await parseQuerySyntaxIntoIR({
+ query: request.query,
+ options: request.options,
+ prisma,
+ }) : request.query;
- const body = JSON.stringify({
- q: query,
- // @see: https://github.com/sourcebot-dev/zoekt/blob/main/api.go#L892
- opts: {
- ChunkMatches: true,
- // @note: Zoekt has several different ways to limit a given search. The two that
- // we care about are `MaxMatchDisplayCount` and `TotalMaxMatchCount`:
- // - `MaxMatchDisplayCount` truncates the number of matches AFTER performing
- // a search (specifically, after collating and sorting the results). The number of
- // results returned by the API will be less than or equal to this value.
- //
- // - `TotalMaxMatchCount` truncates the number of matches DURING a search. The results
- // returned by the API the API can be less than, equal to, or greater than this value.
- // Why greater? Because this value is compared _after_ a given shard has finished
- // being processed, the number of matches returned by the last shard may have exceeded
- // this value.
- //
- // Let's define two variables:
- // - `actualMatchCount` : The number of matches that are returned by the API. This is
- // always less than or equal to `MaxMatchDisplayCount`.
- // - `totalMatchCount` : The number of matches that zoekt found before it either
- // 1) found all matches or 2) hit the `TotalMaxMatchCount` limit. This number is
- // not bounded and can be less than, equal to, or greater than both `TotalMaxMatchCount`
- // and `MaxMatchDisplayCount`.
- //
- //
- // Our challenge is to determine whether or not the search returned all possible matches/
- // (it was exaustive) or if it was truncated. By setting the `TotalMaxMatchCount` to
- // `MaxMatchDisplayCount + 1`, we can determine which of these occurred by comparing
- // `totalMatchCount` to `MaxMatchDisplayCount`.
- //
- // if (totalMatchCount ≤ actualMatchCount):
- // Search is EXHAUSTIVE (found all possible matches)
- // Proof: totalMatchCount ≤ MaxMatchDisplayCount < TotalMaxMatchCount
- // Therefore Zoekt stopped naturally, not due to limit
- //
- // if (totalMatchCount > actualMatchCount):
- // Search is TRUNCATED (more matches exist)
- // Proof: totalMatchCount > MaxMatchDisplayCount + 1 = TotalMaxMatchCount
- // Therefore Zoekt hit the limit and stopped searching
- //
- MaxMatchDisplayCount: matches,
- TotalMaxMatchCount: matches + 1,
- NumContextLines: contextLines,
- Whole: !!whole,
- ShardMaxMatchCount: -1,
- MaxWallTime: 0, // zoekt expects a duration in nanoseconds
- }
+ const zoektSearchRequest = await createZoektSearchRequest({
+ query,
+ options: request.options,
+ repoSearchScope,
});
- let header: Record<string, string> = {};
- header = {
- "X-Tenant-ID": org.id.toString()
- };
-
- const { data: searchResponse, durationMs: fetchDurationMs } = await measure(
- () => zoektFetch({
- path: "/api/search",
- body,
- header,
- method: "POST",
- }),
- "zoekt_fetch",
- false
- );
-
- if (!searchResponse.ok) {
- return invalidZoektResponse(searchResponse);
- }
-
- const transformZoektSearchResponse = async ({ Result }: ZoektSearchResponse) => {
- // @note (2025-05-12): in zoekt, repositories are identified by the `RepositoryID` field
- // which corresponds to the `id` in the Repo table. In order to efficiently fetch repository
- // metadata when transforming (potentially thousands) of file matches, we aggregate a unique
- // set of repository ids* and map them to their corresponding Repo record.
- //
- // *Q: Why is `RepositoryID` optional? And why are we falling back to `Repository`?
- // A: Prior to this change, the repository id was not plumbed into zoekt, so RepositoryID was
- // always undefined. To make this a non-breaking change, we fallback to using the repository's name
- // (`Repository`) as the identifier in these cases. This is not guaranteed to be unique, but in
- // practice it is since the repository name includes the host and path (e.g., 'github.com/org/repo',
- // 'gitea.com/org/repo', etc.).
- //
- // Note: When a repository is re-indexed (every hour) this ID will be populated.
- // @see: https://github.com/sourcebot-dev/zoekt/pull/6
- const repoIdentifiers = new Set(Result.Files?.map((file) => file.RepositoryID ?? file.Repository) ?? []);
- const repos = new Map<string | number, Repo>();
-
- (await prisma.repo.findMany({
- where: {
- id: {
- in: Array.from(repoIdentifiers).filter((id) => typeof id === "number"),
- },
- orgId: org.id,
- }
- })).forEach(repo => repos.set(repo.id, repo));
-
- (await prisma.repo.findMany({
- where: {
- name: {
- in: Array.from(repoIdentifiers).filter((id) => typeof id === "string"),
- },
- orgId: org.id,
- }
- })).forEach(repo => repos.set(repo.name, repo));
-
- const files = Result.Files?.map((file) => {
- const fileNameChunks = file.ChunkMatches.filter((chunk) => chunk.FileName);
-
- const webUrl = (() => {
- const template: string | undefined = Result.RepoURLs[file.Repository];
- if (!template) {
- return undefined;
- }
-
- // If there are multiple branches pointing to the same revision of this file, it doesn't
- // matter which branch we use here, so use the first one.
- const branch = file.Branches && file.Branches.length > 0 ? file.Branches[0] : "HEAD";
- return getFileWebUrl(template, branch, file.FileName);
- })();
-
- const identifier = file.RepositoryID ?? file.Repository;
- const repo = repos.get(identifier);
-
- // This can happen if the user doesn't have access to the repository.
- if (!repo) {
- return undefined;
- }
+ return zoektSearch(zoektSearchRequest, prisma);
+ }));
- return {
- fileName: {
- text: file.FileName,
- matchRanges: fileNameChunks.length === 1 ? fileNameChunks[0].Ranges.map((range) => ({
- start: {
- byteOffset: range.Start.ByteOffset,
- column: range.Start.Column,
- lineNumber: range.Start.LineNumber,
- },
- end: {
- byteOffset: range.End.ByteOffset,
- column: range.End.Column,
- lineNumber: range.End.LineNumber,
- }
- })) : [],
- },
- repository: repo.name,
- repositoryId: repo.id,
- webUrl: webUrl,
- language: file.Language,
- chunks: file.ChunkMatches
- .filter((chunk) => !chunk.FileName) // Filter out filename chunks.
- .map((chunk) => {
- return {
- content: base64Decode(chunk.Content),
- matchRanges: chunk.Ranges.map((range) => ({
- start: {
- byteOffset: range.Start.ByteOffset,
- column: range.Start.Column,
- lineNumber: range.Start.LineNumber,
- },
- end: {
- byteOffset: range.End.ByteOffset,
- column: range.End.Column,
- lineNumber: range.End.LineNumber,
- }
- }) satisfies SourceRange),
- contentStart: {
- byteOffset: chunk.ContentStart.ByteOffset,
- column: chunk.ContentStart.Column,
- lineNumber: chunk.ContentStart.LineNumber,
- },
- symbols: chunk.SymbolInfo?.map((symbol) => {
- return {
- symbol: symbol.Sym,
- kind: symbol.Kind,
- parent: symbol.Parent.length > 0 ? {
- symbol: symbol.Parent,
- kind: symbol.ParentKind,
- } : undefined,
- }
- }) ?? undefined,
- }
- }),
- branches: file.Branches,
- content: file.Content ? base64Decode(file.Content) : undefined,
- }
- }).filter((file) => file !== undefined) ?? [];
+export const streamSearch = (request: SearchRequest) => sew(() =>
+ withOptionalAuthV2(async ({ prisma, user }) => {
+ const repoSearchScope = await getAccessibleRepoNamesForUser({ user, prisma });
+
+ // If needed, parse the query syntax into the query intermediate representation.
+ const query = request.queryType === 'string' ? await parseQuerySyntaxIntoIR({
+ query: request.query,
+ options: request.options,
+ prisma,
+ }) : request.query;
+
+ const zoektSearchRequest = await createZoektSearchRequest({
+ query,
+ options: request.options,
+ repoSearchScope,
+ });
- const actualMatchCount = files.reduce(
- (acc, file) =>
- // Match count is the sum of the number of chunk matches and file name matches.
- acc + file.chunks.reduce(
- (acc, chunk) => acc + chunk.matchRanges.length,
- 0,
- ) + file.fileName.matchRanges.length,
- 0,
- );
+ return zoektStreamSearch(zoektSearchRequest, prisma);
+ }));
- const totalMatchCount = Result.MatchCount;
- const isSearchExhaustive = totalMatchCount <= actualMatchCount;
+/**
+ * Returns a list of repository names that the user has access to.
+ * If permission syncing is disabled, returns undefined.
+ */
+const getAccessibleRepoNamesForUser = async ({ user, prisma }: { user?: UserWithAccounts, prisma: PrismaClient }) => {
+ if (
+ env.EXPERIMENT_EE_PERMISSION_SYNC_ENABLED !== 'true' ||
+ !hasEntitlement('permission-syncing')
+ ) {
+ return undefined;
+ }
- return {
- files,
- repositoryInfo: Array.from(repos.values()).map((repo) => ({
- id: repo.id,
- codeHostType: repo.external_codeHostType,
- name: repo.name,
- displayName: repo.displayName ?? undefined,
- webUrl: repo.webUrl ?? undefined,
- })),
- isBranchFilteringEnabled,
- isSearchExhaustive,
- stats: {
- actualMatchCount,
- totalMatchCount,
- duration: Result.Duration,
- fileCount: Result.FileCount,
- filesSkipped: Result.FilesSkipped,
- contentBytesLoaded: Result.ContentBytesLoaded,
- indexBytesLoaded: Result.IndexBytesLoaded,
- crashes: Result.Crashes,
- shardFilesConsidered: Result.ShardFilesConsidered,
- filesConsidered: Result.FilesConsidered,
- filesLoaded: Result.FilesLoaded,
- shardsScanned: Result.ShardsScanned,
- shardsSkipped: Result.ShardsSkipped,
- shardsSkippedFilter: Result.ShardsSkippedFilter,
- ngramMatches: Result.NgramMatches,
- ngramLookups: Result.NgramLookups,
- wait: Result.Wait,
- matchTreeConstruction: Result.MatchTreeConstruction,
- matchTreeSearch: Result.MatchTreeSearch,
- regexpsConsidered: Result.RegexpsConsidered,
- flushReason: Result.FlushReason,
- }
- } satisfies SearchResponse;
+ const accessibleRepos = await prisma.repo.findMany({
+ where: getRepoPermissionFilterForUser(user),
+ select: {
+ name: true,
}
-
- const { data: rawZoektResponse, durationMs: parseJsonDurationMs } = await measure(
- () => searchResponse.json(),
- "parse_json",
- false
- );
-
- // @note: We do not use zod parseAsync here since in cases where the
- // response is large (> 40MB), there can be significant performance issues.
- const zoektResponse = rawZoektResponse as ZoektSearchResponse;
-
- const { data: response, durationMs: transformZoektResponseDurationMs } = await measure(
- () => transformZoektSearchResponse(zoektResponse),
- "transform_zoekt_response",
- false
- );
-
- const totalDurationMs = fetchDurationMs + parseJsonDurationMs + transformZoektResponseDurationMs;
-
- // Debug log: timing breakdown
- const timings = [
- { name: "zoekt_fetch", duration: fetchDurationMs },
- { name: "parse_json", duration: parseJsonDurationMs },
- { name: "transform_zoekt_response", duration: transformZoektResponseDurationMs },
- ];
-
- logger.debug(`Search timing breakdown (query: "${query}"):`);
- timings.forEach(({ name, duration }) => {
- const percentage = ((duration / totalDurationMs) * 100).toFixed(1);
- const durationStr = duration.toFixed(2).padStart(8);
- const percentageStr = percentage.padStart(5);
- logger.debug(` ${name.padEnd(25)} ${durationStr}ms (${percentageStr}%)`);
- });
- logger.debug(` ${"TOTAL".padEnd(25)} ${totalDurationMs.toFixed(2).padStart(8)}ms (100.0%)`);
-
- return {
- ...response,
- __debug_timings: {
- zoekt_fetch: fetchDurationMs,
- parse_json: parseJsonDurationMs,
- transform_zoekt_response: transformZoektResponseDurationMs,
- }
- } satisfies SearchResponse;
- }));
+ });
+ return accessibleRepos.map(repo => repo.name);
+}
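
A sketch of how callers might invoke the reworked API with either request variant (query strings and option values are illustrative):

    import { search } from './searchApi';

    // Query-syntax variant: the string is parsed into the IR server-side.
    const response = await search({
        queryType: 'string',
        query: 'repo:linux hello',
        options: { matches: 100, contextLines: 3, isRegexEnabled: true },
    });

    // IR variant: callers that already hold a QueryIR skip query-syntax parsing.
    const irResponse = await search({
        queryType: 'ir',
        query: { substring: { pattern: 'hello', case_sensitive: false, file_name: false, content: true }, query: 'substring' },
        options: { matches: 100 },
    });
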
diff --git a/packages/web/src/features/search/types.ts b/packages/web/src/features/search/types.ts
index 2a238857c..90f501821 100644
--- a/packages/web/src/features/search/types.ts
+++ b/packages/web/src/features/search/types.ts
@@ -1,27 +1,164 @@
-// @NOTE : Please keep this file in sync with @sourcebot/mcp/src/types.ts
-import {
- fileSourceResponseSchema,
- locationSchema,
- searchRequestSchema,
- searchResponseSchema,
- rangeSchema,
- fileSourceRequestSchema,
- symbolSchema,
- repositoryInfoSchema,
- searchStatsSchema,
-} from "./schemas";
+import { CodeHostType } from "@sourcebot/db";
import { z } from "zod";
+import { serviceErrorSchema } from "@/lib/serviceError";
+export const locationSchema = z.object({
+ byteOffset: z.number(), // 0-based byte offset from the beginning of the file
+ lineNumber: z.number(), // 1-based line number from the beginning of the file
+ column: z.number(), // 1-based column number (in runes) from the beginning of line
+});
+export type SourceLocation = z.infer<typeof locationSchema>;
+
+export const rangeSchema = z.object({
+ start: locationSchema,
+ end: locationSchema,
+});
+export type SourceRange = z.infer<typeof rangeSchema>;
+
+export const symbolSchema = z.object({
+ symbol: z.string(),
+ kind: z.string(),
+});
+export type SearchSymbol = z.infer<typeof symbolSchema>;
+
+export const repositoryInfoSchema = z.object({
+ id: z.number(),
+ codeHostType: z.nativeEnum(CodeHostType),
+ name: z.string(),
+ displayName: z.string().optional(),
+ webUrl: z.string().optional(),
+});
+export type RepositoryInfo = z.infer<typeof repositoryInfoSchema>;
+
+// @note: Many of these fields are defined in zoekt/api.go.
+export const searchStatsSchema = z.object({
+ actualMatchCount: z.number(), // The actual number of matches returned by the search. This will always be less than or equal to `totalMatchCount`.
+ totalMatchCount: z.number(), // The total number of matches found during the search.
+ duration: z.number(), // The duration (in nanoseconds) of the search.
+ fileCount: z.number(), // Number of files containing a match.
+ filesSkipped: z.number(), // Candidate files whose contents weren't examined because we gathered enough matches.
+ contentBytesLoaded: z.number(), // Amount of I/O for reading contents.
+ indexBytesLoaded: z.number(), // Amount of I/O for reading from index.
+ crashes: z.number(), // Number of search shards that had a crash.
+ shardFilesConsidered: z.number(), // Number of files in shards that we considered.
+ filesConsidered: z.number(), // Files that we evaluated. Equivalent to files for which all atom matches (including negations) evaluated to true.
+ filesLoaded: z.number(), // Files for which we loaded file content to verify substring matches
+ shardsScanned: z.number(), // Shards that we scanned to find matches.
+ shardsSkipped: z.number(), // Shards that we did not process because a query was canceled.
+ shardsSkippedFilter: z.number(), // Shards that we did not process because the query was rejected by the ngram filter indicating it had no matches.
+ ngramMatches: z.number(), // Number of candidate matches as a result of searching ngrams.
+ ngramLookups: z.number(), // NgramLookups is the number of times we accessed an ngram in the index.
+ wait: z.number(), // Wall clock time for queued search.
+ matchTreeConstruction: z.number(), // Aggregate wall clock time spent constructing and pruning the match tree. This accounts for time such as lookups in the trigram index.
+ matchTreeSearch: z.number(), // Aggregate wall clock time spent searching the match tree. This accounts for the bulk of search work done looking for matches.
+ regexpsConsidered: z.number(), // Number of times regexp was called on files that we evaluated.
+ flushReason: z.string(), // FlushReason explains why results were flushed.
+});
+export type SearchStats = z.infer<typeof searchStatsSchema>;
+
+export const searchFileSchema = z.object({
+ fileName: z.object({
+ // The name of the file
+ text: z.string(),
+ // Any matching ranges
+ matchRanges: z.array(rangeSchema),
+ }),
+ webUrl: z.string().optional(),
+ repository: z.string(),
+ repositoryId: z.number(),
+ language: z.string(),
+ chunks: z.array(z.object({
+ content: z.string(),
+ matchRanges: z.array(rangeSchema),
+ contentStart: locationSchema,
+ symbols: z.array(z.object({
+ ...symbolSchema.shape,
+ parent: symbolSchema.optional(),
+ })).optional(),
+ })),
+ branches: z.array(z.string()).optional(),
+ // Set if `whole` is true.
+ content: z.string().optional(),
+});
+export type SearchResultFile = z.infer<typeof searchFileSchema>;
+export type SearchResultChunk = SearchResultFile["chunks"][number];
+
+export const searchOptionsSchema = z.object({
+ matches: z.number(), // The number of matches to return.
+ contextLines: z.number().optional(), // The number of context lines to return.
+ whole: z.boolean().optional(), // Whether to return the whole file as part of the response.
+ isRegexEnabled: z.boolean().optional(), // Whether to enable regular expression search.
+ isCaseSensitivityEnabled: z.boolean().optional(), // Whether to enable case sensitivity.
+});
+export type SearchOptions = z.infer<typeof searchOptionsSchema>;
+
+export const searchRequestSchema = z.object({
+ query: z.string(), // The zoekt query to execute.
+ ...searchOptionsSchema.shape,
+});
export type SearchRequest = z.infer<typeof searchRequestSchema>;
+
+export const searchResponseSchema = z.object({
+ stats: searchStatsSchema,
+ files: z.array(searchFileSchema),
+ repositoryInfo: z.array(repositoryInfoSchema),
+ isSearchExhaustive: z.boolean(),
+});
export type SearchResponse = z.infer<typeof searchResponseSchema>;
-export type SearchResultLocation = z.infer<typeof locationSchema>;
-export type SearchResultFile = SearchResponse["files"][number];
-export type SearchResultChunk = SearchResultFile["chunks"][number];
-export type SearchSymbol = z.infer<typeof symbolSchema>;
+/**
+ * Sent after each chunk of results is processed.
+ */
+export const streamedSearchChunkResponseSchema = z.object({
+ type: z.literal('chunk'),
+ stats: searchStatsSchema,
+ files: z.array(searchFileSchema),
+ repositoryInfo: z.array(repositoryInfoSchema),
+});
+export type StreamedSearchChunkResponse = z.infer<typeof streamedSearchChunkResponseSchema>;
+
+/**
+ * Sent after the search is complete.
+ */
+export const streamedSearchFinalResponseSchema = z.object({
+ type: z.literal('final'),
+ accumulatedStats: searchStatsSchema,
+ isSearchExhaustive: z.boolean(),
+});
+export type StreamedSearchFinalResponse = z.infer<typeof streamedSearchFinalResponseSchema>;
+
+/**
+ * Sent when an error occurs during streaming.
+ */
+export const streamedSearchErrorResponseSchema = z.object({
+ type: z.literal('error'),
+ error: serviceErrorSchema,
+});
+export type StreamedSearchErrorResponse = z.infer<typeof streamedSearchErrorResponseSchema>;
+
+export const streamedSearchResponseSchema = z.discriminatedUnion('type', [
+ streamedSearchChunkResponseSchema,
+ streamedSearchFinalResponseSchema,
+ streamedSearchErrorResponseSchema,
+]);
+export type StreamedSearchResponse = z.infer<typeof streamedSearchResponseSchema>;
+
+export const fileSourceRequestSchema = z.object({
+ fileName: z.string(),
+ repository: z.string(),
+ branch: z.string().optional(),
+});
export type FileSourceRequest = z.infer<typeof fileSourceRequestSchema>;
-export type FileSourceResponse = z.infer<typeof fileSourceResponseSchema>;
-export type RepositoryInfo = z.infer<typeof repositoryInfoSchema>;
-export type SourceRange = z.infer<typeof rangeSchema>;
-export type SearchStats = z.infer<typeof searchStatsSchema>;
\ No newline at end of file
+export const fileSourceResponseSchema = z.object({
+ source: z.string(),
+ language: z.string(),
+ path: z.string(),
+ repository: z.string(),
+ repositoryCodeHostType: z.nativeEnum(CodeHostType),
+ repositoryDisplayName: z.string().optional(),
+ repositoryWebUrl: z.string().optional(),
+ branch: z.string().optional(),
+ webUrl: z.string().optional(),
+});
+export type FileSourceResponse = z.infer<typeof fileSourceResponseSchema>;
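
A sketch of how a consumer might narrow the streamed response union (assuming each SSE data payload has already been extracted and JSON-parsed):

    import { streamedSearchResponseSchema, StreamedSearchResponse } from './types';

    const handleEvent = (raw: unknown) => {
        const event: StreamedSearchResponse = streamedSearchResponseSchema.parse(raw);
        switch (event.type) {
            case 'chunk':
                // Incremental results: files, repository info, and per-chunk stats.
                console.log(`received ${event.files.length} file(s)`);
                break;
            case 'final':
                console.log('search exhaustive?', event.isSearchExhaustive);
                break;
            case 'error':
                console.error('search failed:', event.error);
                break;
        }
    };
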
diff --git a/packages/web/src/features/search/zoektClient.ts b/packages/web/src/features/search/zoektClient.ts
deleted file mode 100644
index bd30fcd14..000000000
--- a/packages/web/src/features/search/zoektClient.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import { env } from "@sourcebot/shared";
-
-interface ZoektRequest {
- path: string,
- body: string,
- method: string,
- header?: Record<string, string>,
- cache?: RequestCache,
-}
-
-export const zoektFetch = async ({
- path,
- body,
- method,
- header,
- cache,
-}: ZoektRequest) => {
- const response = await fetch(
- new URL(path, env.ZOEKT_WEBSERVER_URL),
- {
- method,
- headers: {
- ...header,
- "Content-Type": "application/json",
- },
- body,
- cache,
- }
- );
-
- // @todo : add metrics
-
- return response;
-}
\ No newline at end of file
diff --git a/packages/web/src/features/search/zoektSchema.ts b/packages/web/src/features/search/zoektSchema.ts
deleted file mode 100644
index c4f37e38f..000000000
--- a/packages/web/src/features/search/zoektSchema.ts
+++ /dev/null
@@ -1,135 +0,0 @@
-
-import { z } from "zod";
-
-// @see : https://github.com/sourcebot-dev/zoekt/blob/main/api.go#L212
-export const zoektLocationSchema = z.object({
- // 0-based byte offset from the beginning of the file
- ByteOffset: z.number(),
- // 1-based line number from the beginning of the file
- LineNumber: z.number(),
- // 1-based column number (in runes) from the beginning of line
- Column: z.number(),
-});
-
-export const zoektRangeSchema = z.object({
- Start: zoektLocationSchema,
- End: zoektLocationSchema,
-});
-
-// @see : https://github.com/sourcebot-dev/zoekt/blob/3780e68cdb537d5a7ed2c84d9b3784f80c7c5d04/api.go#L350
-export const zoektSearchResponseStats = {
- ContentBytesLoaded: z.number(),
- IndexBytesLoaded: z.number(),
- Crashes: z.number(),
- Duration: z.number(),
- FileCount: z.number(),
- ShardFilesConsidered: z.number(),
- FilesConsidered: z.number(),
- FilesLoaded: z.number(),
- FilesSkipped: z.number(),
- ShardsScanned: z.number(),
- ShardsSkipped: z.number(),
- ShardsSkippedFilter: z.number(),
- MatchCount: z.number(),
- NgramMatches: z.number(),
- NgramLookups: z.number(),
- Wait: z.number(),
- MatchTreeConstruction: z.number(),
- MatchTreeSearch: z.number(),
- RegexpsConsidered: z.number(),
- FlushReason: z.number(),
-}
-
-export const zoektSymbolSchema = z.object({
- Sym: z.string(),
- Kind: z.string(),
- Parent: z.string(),
- ParentKind: z.string(),
-});
-
-// @see : https://github.com/sourcebot-dev/zoekt/blob/3780e68cdb537d5a7ed2c84d9b3784f80c7c5d04/api.go#L497
-export const zoektSearchResponseSchema = z.object({
- Result: z.object({
- ...zoektSearchResponseStats,
- Files: z.array(z.object({
- FileName: z.string(),
- Repository: z.string(),
- RepositoryID: z.number().optional(),
- Version: z.string().optional(),
- Language: z.string(),
- Branches: z.array(z.string()).optional(),
- ChunkMatches: z.array(z.object({
- Content: z.string(),
- Ranges: z.array(zoektRangeSchema),
- FileName: z.boolean(),
- ContentStart: zoektLocationSchema,
- Score: z.number(),
- SymbolInfo: z.array(zoektSymbolSchema).nullable(),
- })),
- Checksum: z.string(),
- Score: z.number(),
- // Set if `whole` is true.
- Content: z.string().optional(),
- })).nullable(),
- RepoURLs: z.record(z.string(), z.string()),
- }),
-});
-
-export type ZoektSearchResponse = z.infer<typeof zoektSearchResponseSchema>;
-
-// @see : https://github.com/sourcebot-dev/zoekt/blob/3780e68cdb537d5a7ed2c84d9b3784f80c7c5d04/api.go#L728
-const zoektRepoStatsSchema = z.object({
- Repos: z.number(),
- Shards: z.number(),
- Documents: z.number(),
- IndexBytes: z.number(),
- ContentBytes: z.number(),
- NewLinesCount: z.number(),
- DefaultBranchNewLinesCount: z.number(),
- OtherBranchesNewLinesCount: z.number(),
-});
-
-// @see : https://github.com/sourcebot-dev/zoekt/blob/3780e68cdb537d5a7ed2c84d9b3784f80c7c5d04/api.go#L716
-const zoektIndexMetadataSchema = z.object({
- IndexFormatVersion: z.number(),
- IndexFeatureVersion: z.number(),
- IndexMinReaderVersion: z.number(),
- IndexTime: z.string(),
- PlainASCII: z.boolean(),
- LanguageMap: z.record(z.string(), z.number()),
- ZoektVersion: z.string(),
- ID: z.string(),
-});
-
-
-// @see : https://github.com/sourcebot-dev/zoekt/blob/3780e68cdb537d5a7ed2c84d9b3784f80c7c5d04/api.go#L555
-export const zoektRepositorySchema = z.object({
- Name: z.string(),
- URL: z.string(),
- Source: z.string(),
- Branches: z.array(z.object({
- Name: z.string(),
- Version: z.string(),
- })).nullable(),
- CommitURLTemplate: z.string(),
- FileURLTemplate: z.string(),
- LineFragmentTemplate: z.string(),
- RawConfig: z.record(z.string(), z.string()).nullable(),
- Rank: z.number(),
- IndexOptions: z.string(),
- HasSymbols: z.boolean(),
- Tombstone: z.boolean(),
- LatestCommitDate: z.string(),
- FileTombstones: z.string().optional(),
-});
-
-export const zoektListRepositoriesResponseSchema = z.object({
- List: z.object({
- Repos: z.array(z.object({
- Repository: zoektRepositorySchema,
- IndexMetadata: zoektIndexMetadataSchema,
- Stats: zoektRepoStatsSchema,
- })),
- Stats: zoektRepoStatsSchema,
- })
-});
\ No newline at end of file
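
For orientation on the new zoektSearcher.ts below, here is a sketch of what createZoektSearchRequest produces for an IR with no rev: filter and a two-repo permission scope (repository names are illustrative):

    const request = await createZoektSearchRequest({
        query: { substring: { pattern: 'hello', case_sensitive: false, file_name: false, content: true }, query: 'substring' },
        options: { matches: 100 },
        repoSearchScope: ['github.com/org/a', 'github.com/org/b'],
    });
    // request.query is an AND of:
    //   1. the original IR,
    //   2. { branch: { pattern: 'HEAD', exact: true } } (added because no rev: filter was present),
    //   3. { repo_set: { set: { 'github.com/org/a': true, 'github.com/org/b': true } } }.
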
diff --git a/packages/web/src/features/search/zoektSearcher.ts b/packages/web/src/features/search/zoektSearcher.ts
new file mode 100644
index 000000000..5bdea8bce
--- /dev/null
+++ b/packages/web/src/features/search/zoektSearcher.ts
@@ -0,0 +1,579 @@
+import { getCodeHostBrowseFileAtBranchUrl } from "@/lib/utils";
+import { unexpectedError } from "@/lib/serviceError";
+import type { ProtoGrpcType } from '@/proto/webserver';
+import { FileMatch__Output as ZoektGrpcFileMatch } from "@/proto/zoekt/webserver/v1/FileMatch";
+import { FlushReason as ZoektGrpcFlushReason } from "@/proto/zoekt/webserver/v1/FlushReason";
+import { Range__Output as ZoektGrpcRange } from "@/proto/zoekt/webserver/v1/Range";
+import type { SearchRequest as ZoektGrpcSearchRequest } from '@/proto/zoekt/webserver/v1/SearchRequest';
+import { SearchResponse__Output as ZoektGrpcSearchResponse } from "@/proto/zoekt/webserver/v1/SearchResponse";
+import { StreamSearchRequest as ZoektGrpcStreamSearchRequest } from "@/proto/zoekt/webserver/v1/StreamSearchRequest";
+import { StreamSearchResponse__Output as ZoektGrpcStreamSearchResponse } from "@/proto/zoekt/webserver/v1/StreamSearchResponse";
+import { WebserverServiceClient } from '@/proto/zoekt/webserver/v1/WebserverService';
+import * as grpc from '@grpc/grpc-js';
+import * as protoLoader from '@grpc/proto-loader';
+import * as Sentry from '@sentry/nextjs';
+import { PrismaClient, Repo } from "@sourcebot/db";
+import { createLogger, env } from "@sourcebot/shared";
+import path from 'path';
+import { isBranchQuery, QueryIR, someInQueryIR } from './ir';
+import { RepositoryInfo, SearchResponse, SearchResultFile, SearchStats, SourceRange, StreamedSearchErrorResponse, StreamedSearchResponse } from "./types";
+
+const logger = createLogger("zoekt-searcher");
+
+/**
+ * Creates a ZoektGrpcSearchRequest given a query IR.
+ */
+export const createZoektSearchRequest = async ({
+ query,
+ options,
+ repoSearchScope,
+}: {
+ query: QueryIR;
+ options: {
+ matches: number,
+ contextLines?: number,
+ whole?: boolean,
+ };
+ // Allows the caller to scope the search to a specific set of repositories.
+ repoSearchScope?: string[];
+}) => {
+ // Find if there are any `rev:` filters in the query.
+ const containsRevExpression = someInQueryIR(query, (q) => isBranchQuery(q));
+
+ const zoektSearchRequest: ZoektGrpcSearchRequest = {
+ query: {
+ and: {
+ children: [
+ query,
+ // If the query does not contain a `rev:` filter, we default to searching `HEAD`.
+ ...(!containsRevExpression ? [{
+ branch: {
+ pattern: 'HEAD',
+ exact: true,
+ }
+ }] : []),
+ ...(repoSearchScope ? [{
+ repo_set: {
+ set: repoSearchScope.reduce((acc, repo) => {
+ acc[repo] = true;
+ return acc;
+ }, {} as Record<string, boolean>)
+ }
+ }] : []),
+ ]
+ }
+ },
+ opts: {
+ chunk_matches: true,
+ // @note: Zoekt has several different ways to limit a given search. The two that
+ // we care about are `MaxMatchDisplayCount` and `TotalMaxMatchCount`:
+ // - `MaxMatchDisplayCount` truncates the number of matches AFTER performing
+ // a search (specifically, after collating and sorting the results). The number of
+ // results returned by the API will be less than or equal to this value.
+ //
+ // - `TotalMaxMatchCount` truncates the number of matches DURING a search. The results
+ // returned by the API can be less than, equal to, or greater than this value.
+ // Why greater? Because this value is compared _after_ a given shard has finished
+ // being processed, the number of matches returned by the last shard may have exceeded
+ // this value.
+ //
+ // Let's define two variables:
+ // - `actualMatchCount` : The number of matches that are returned by the API. This is
+ // always less than or equal to `MaxMatchDisplayCount`.
+ // - `totalMatchCount` : The number of matches that zoekt found before it either
+ // 1) found all matches or 2) hit the `TotalMaxMatchCount` limit. This number is
+ // not bounded and can be less than, equal to, or greater than both `TotalMaxMatchCount`
+ // and `MaxMatchDisplayCount`.
+ //
+ //
+ // Our challenge is to determine whether or not the search returned all possible matches
+ // (it was exhaustive) or if it was truncated. By setting the `TotalMaxMatchCount` to
+ // `MaxMatchDisplayCount + 1`, we can determine which of these occurred by comparing
+ // `totalMatchCount` to `MaxMatchDisplayCount`.
+ //
+ // if (totalMatchCount ≤ actualMatchCount):
+ // Search is EXHAUSTIVE (found all possible matches)
+ // Proof: totalMatchCount ≤ MaxMatchDisplayCount < TotalMaxMatchCount
+ // Therefore Zoekt stopped naturally, not due to limit
+ //
+ // if (totalMatchCount > actualMatchCount):
+ // Search is TRUNCATED (more matches exist)
+ // Proof: totalMatchCount > MaxMatchDisplayCount + 1 = TotalMaxMatchCount
+ // Therefore Zoekt hit the limit and stopped searching
+ //
+ max_match_display_count: options.matches,
+ total_max_match_count: options.matches + 1,
+ num_context_lines: options.contextLines ?? 0,
+ whole: !!options.whole,
+ shard_max_match_count: -1,
+ max_wall_time: {
+ seconds: 0,
+ }
+ },
+ };
+
+ return zoektSearchRequest;
+}
+
+export const zoektSearch = async (searchRequest: ZoektGrpcSearchRequest, prisma: PrismaClient): Promise<SearchResponse> => {
+ const client = createGrpcClient();
+ const metadata = new grpc.Metadata();
+
+ return new Promise((resolve, reject) => {
+ client.Search(searchRequest, metadata, (error, response) => {
+ if (error || !response) {
+ reject(error || new Error('No response received'));
+ return;
+ }
+
+ (async () => {
+ try {
+ const reposMapCache = await createReposMapForChunk(response, new Map(), prisma);
+ const { stats, files, repositoryInfo } = await transformZoektSearchResponse(response, reposMapCache);
+
+ resolve({
+ stats,
+ files,
+ repositoryInfo,
+ isSearchExhaustive: stats.totalMatchCount <= stats.actualMatchCount,
+ } satisfies SearchResponse);
+ } catch (err) {
+ reject(err);
+ }
+ })();
+ });
+ });
+}
+
+export const zoektStreamSearch = async (searchRequest: ZoektGrpcSearchRequest, prisma: PrismaClient): Promise<ReadableStream> => {
+ const client = createGrpcClient();
+ let grpcStream: ReturnType<typeof client.StreamSearch> | null = null;
+ let isStreamActive = true;
+ let pendingChunks = 0;
+ let accumulatedStats: SearchStats = {
+ actualMatchCount: 0,
+ totalMatchCount: 0,
+ duration: 0,
+ fileCount: 0,
+ filesSkipped: 0,
+ contentBytesLoaded: 0,
+ indexBytesLoaded: 0,
+ crashes: 0,
+ shardFilesConsidered: 0,
+ filesConsidered: 0,
+ filesLoaded: 0,
+ shardsScanned: 0,
+ shardsSkipped: 0,
+ shardsSkippedFilter: 0,
+ ngramMatches: 0,
+ ngramLookups: 0,
+ wait: 0,
+ matchTreeConstruction: 0,
+ matchTreeSearch: 0,
+ regexpsConsidered: 0,
+ flushReason: ZoektGrpcFlushReason.FLUSH_REASON_UNKNOWN_UNSPECIFIED,
+ };
+
+ return new ReadableStream({
+ async start(controller) {
+ const tryCloseController = () => {
+ if (!isStreamActive && pendingChunks === 0) {
+ const finalResponse: StreamedSearchResponse = {
+ type: 'final',
+ accumulatedStats,
+ isSearchExhaustive: accumulatedStats.totalMatchCount <= accumulatedStats.actualMatchCount,
+ }
+
+ controller.enqueue(encodeSSEREsponseChunk(finalResponse));
+ controller.enqueue(encodeSSEREsponseChunk('[DONE]'));
+ controller.close();
+ client.close();
+ logger.debug('SSE stream closed');
+ }
+ };
+
+ try {
+ const metadata = new grpc.Metadata();
+
+ const streamRequest: ZoektGrpcStreamSearchRequest = {
+ request: searchRequest,
+ };
+
+ grpcStream = client.StreamSearch(streamRequest, metadata);
+
+ // `_reposMapCache` is used to cache repository metadata across all chunks.
+ // This reduces the number of database queries required to transform file matches.
+ const _reposMapCache = new Map<string | number, Repo>();
+
+ // Handle incoming data chunks
+ grpcStream.on('data', async (chunk: ZoektGrpcStreamSearchResponse) => {
+ if (!isStreamActive) {
+ logger.debug('SSE stream closed, skipping chunk');
+ return;
+ }
+
+ // Track that we're processing a chunk
+ pendingChunks++;
+
+ // grpcStream.on doesn't actually await on our handler, so we need to
+ // explicitly pause the stream here to prevent the stream from completing
+ // prior to our asynchronous work being completed.
+ grpcStream?.pause();
+
+ try {
+ if (!chunk.response_chunk) {
+ logger.warn('No response chunk received');
+ return;
+ }
+
+ const reposMapCache = await createReposMapForChunk(chunk.response_chunk, _reposMapCache, prisma);
+ const { stats, files, repositoryInfo } = await transformZoektSearchResponse(chunk.response_chunk, reposMapCache);
+
+ accumulatedStats = accumulateStats(accumulatedStats, stats);
+
+ const response: StreamedSearchResponse = {
+ type: 'chunk',
+ files,
+ repositoryInfo,
+ stats
+ }
+
+ controller.enqueue(encodeSSEREsponseChunk(response));
+ } catch (error) {
+ logger.error('Error processing chunk:', error);
+ Sentry.captureException(error);
+ isStreamActive = false;
+
+ const errorMessage = error instanceof Error ? error.message : 'Unknown error processing chunk';
+ const errorResponse: StreamedSearchErrorResponse = {
+ type: 'error',
+ error: unexpectedError(errorMessage),
+ };
+ controller.enqueue(encodeSSEREsponseChunk(errorResponse));
+ } finally {
+ pendingChunks--;
+ grpcStream?.resume();
+
+ // @note: we were hitting "Controller is already closed" errors when calling
+ // `controller.enqueue` above for the last chunk. The reasoning was the event
+ // handler for 'end' was being invoked prior to the completion of the last chunk,
+ // resulting in the controller being closed prematurely. The workaround was to
+ // keep track of the number of pending chunks and only close the controller
+ // when there are no more chunks to process. We need to explicitly call
+ // `tryCloseController` since there _seems_ to be no ordering guarantees between
+ // the 'end' event handler and this callback.
+ tryCloseController();
+ }
+ });
+
+ // Handle stream completion
+ grpcStream.on('end', () => {
+ if (!isStreamActive) {
+ return;
+ }
+ isStreamActive = false;
+ tryCloseController();
+ });
+
+ // Handle errors
+ grpcStream.on('error', (error: grpc.ServiceError) => {
+ logger.error('gRPC stream error:', error);
+ Sentry.captureException(error);
+
+ if (!isStreamActive) {
+ return;
+ }
+ isStreamActive = false;
+
+ // Send properly typed error response
+ const errorResponse: StreamedSearchErrorResponse = {
+ type: 'error',
+ error: unexpectedError(error.details || error.message),
+ };
+ controller.enqueue(encodeSSEREsponseChunk(errorResponse));
+
+ controller.close();
+ client.close();
+ });
+ } catch (error) {
+ logger.error('Stream initialization error:', error);
+ Sentry.captureException(error);
+
+ const errorMessage = error instanceof Error ? error.message : 'Unknown error';
+ const errorResponse: StreamedSearchErrorResponse = {
+ type: 'error',
+ error: unexpectedError(errorMessage),
+ };
+ controller.enqueue(encodeSSEREsponseChunk(errorResponse));
+
+ controller.close();
+ client.close();
+ }
+ },
+ cancel() {
+ logger.warn('SSE stream cancelled by client');
+ isStreamActive = false;
+
+ // Cancel the gRPC stream to stop receiving data
+ if (grpcStream) {
+ grpcStream.cancel();
+ }
+
+ client.close();
+ }
+ });
+}
+
+// Encodes a response chunk into a SSE-compatible format.
+const encodeSSEREsponseChunk = (response: object | string) => {
+ const data = typeof response === 'string' ? response : JSON.stringify(response);
+ return new TextEncoder().encode(`data: ${data}\n\n`);
+}
+
+// Creates a mapping between all repository ids in a given response
+// chunk. The mapping allows us to efficiently lookup repository metadata.
+const createReposMapForChunk = async (chunk: ZoektGrpcSearchResponse, reposMapCache: Map<string | number, Repo>, prisma: PrismaClient): Promise<Map<string | number, Repo>> => {
+ const reposMap = new Map<string | number, Repo>();
+ await Promise.all(chunk.files.map(async (file) => {
+ const id = getRepoIdForFile(file);
+
+ const repo = await (async () => {
+ // If it's in the cache, return the cached value.
+ if (reposMapCache.has(id)) {
+ return reposMapCache.get(id);
+ }
+
+ // Otherwise, query the database for the record.
+ const repo = typeof id === 'number' ?
+ await prisma.repo.findUnique({
+ where: {
+ id: id,
+ },
+ }) :
+ await prisma.repo.findFirst({
+ where: {
+ name: id,
+ },
+ });
+
+ // If a repository is found, cache it for future lookups.
+ if (repo) {
+ reposMapCache.set(id, repo);
+ }
+
+ return repo;
+ })();
+
+ // Only add the repository to the map if it was found.
+ if (repo) {
+ reposMap.set(id, repo);
+ }
+ }));
+
+ return reposMap;
+}
+
+const transformZoektSearchResponse = async (response: ZoektGrpcSearchResponse, reposMapCache: Map<string | number, Repo>): Promise<{
+ stats: SearchStats,
+ files: SearchResultFile[],
+ repositoryInfo: RepositoryInfo[],
+}> => {
+ const files = response.files.map((file) => {
+ const fileNameChunks = file.chunk_matches.filter((chunk) => chunk.file_name);
+ const repoId = getRepoIdForFile(file);
+ const repo = reposMapCache.get(repoId);
+
+ // This should never happen.
+ if (!repo) {
+ throw new Error(`Repository not found for file: ${file.file_name}`);
+ }
+
+ // @todo: address "file_name might not be a valid UTF-8 string" warning.
+ const fileName = file.file_name.toString('utf-8');
+
+ const convertRange = (range: ZoektGrpcRange): SourceRange => ({
+ start: {
+ byteOffset: range.start?.byte_offset ?? 0,
+ column: range.start?.column ?? 1,
+ lineNumber: range.start?.line_number ?? 1,
+ },
+ end: {
+ byteOffset: range.end?.byte_offset ?? 0,
+ column: range.end?.column ?? 1,
+ lineNumber: range.end?.line_number ?? 1,
+ }
+ })
+
+ return {
+ fileName: {
+ text: fileName,
+ matchRanges: fileNameChunks.length === 1 ? fileNameChunks[0].ranges.map(convertRange) : [],
+ },
+ repository: repo.name,
+ repositoryId: repo.id,
+ language: file.language,
+ webUrl: getCodeHostBrowseFileAtBranchUrl({
+ webUrl: repo.webUrl,
+ codeHostType: repo.external_codeHostType,
+ // If a file has multiple branches, default to the first one.
+ branchName: file.branches?.[0] ?? 'HEAD',
+ filePath: fileName,
+ }),
+ chunks: file.chunk_matches
+ .filter((chunk) => !chunk.file_name) // filter out filename chunks.
+ .map((chunk) => {
+ return {
+ content: chunk.content.toString('utf-8'),
+ matchRanges: chunk.ranges.map(convertRange),
+ contentStart: chunk.content_start ? {
+ byteOffset: chunk.content_start.byte_offset,
+ column: chunk.content_start.column,
+ lineNumber: chunk.content_start.line_number,
+ } : {
+ byteOffset: 0,
+ column: 1,
+ lineNumber: 1,
+ },
+ symbols: chunk.symbol_info.map((symbol) => {
+ return {
+ symbol: symbol.sym,
+ kind: symbol.kind,
+ parent: symbol.parent ? {
+ symbol: symbol.parent,
+ kind: symbol.parent_kind,
+ } : undefined,
+ }
+ })
+ }
+ }),
+ branches: file.branches,
+ content: file.content ? file.content.toString('utf-8') : undefined,
+ }
+ }).filter(file => file !== undefined);
+
+ const actualMatchCount = files.reduce(
+ (acc, file) =>
+ // Match count is the sum of the number of chunk matches and file name matches.
+ acc + file.chunks.reduce(
+ (acc, chunk) => acc + chunk.matchRanges.length,
+ 0,
+ ) + file.fileName.matchRanges.length,
+ 0,
+ );
+
+ const stats: SearchStats = {
+ actualMatchCount,
+ totalMatchCount: response.stats?.match_count ?? 0,
+ duration: response.stats?.duration?.nanos ?? 0,
+ fileCount: response.stats?.file_count ?? 0,
+ filesSkipped: response.stats?.files_skipped ?? 0,
+ contentBytesLoaded: response.stats?.content_bytes_loaded ?? 0,
+ indexBytesLoaded: response.stats?.index_bytes_loaded ?? 0,
+ crashes: response.stats?.crashes ?? 0,
+ shardFilesConsidered: response.stats?.shard_files_considered ?? 0,
+ filesConsidered: response.stats?.files_considered ?? 0,
+ filesLoaded: response.stats?.files_loaded ?? 0,
+ shardsScanned: response.stats?.shards_scanned ?? 0,
+ shardsSkipped: response.stats?.shards_skipped ?? 0,
+ shardsSkippedFilter: response.stats?.shards_skipped_filter ?? 0,
+ ngramMatches: response.stats?.ngram_matches ?? 0,
+ ngramLookups: response.stats?.ngram_lookups ?? 0,
+ wait: response.stats?.wait?.nanos ?? 0,
+ matchTreeConstruction: response.stats?.match_tree_construction?.nanos ?? 0,
+ matchTreeSearch: response.stats?.match_tree_search?.nanos ?? 0,
+ regexpsConsidered: response.stats?.regexps_considered ?? 0,
+ flushReason: response.stats?.flush_reason?.toString() ?? ZoektGrpcFlushReason.FLUSH_REASON_UNKNOWN_UNSPECIFIED,
+ }
+
+ return {
+ files,
+ repositoryInfo: Array.from(reposMapCache.values()).map((repo) => ({
+ id: repo.id,
+ codeHostType: repo.external_codeHostType,
+ name: repo.name,
+ displayName: repo.displayName ?? undefined,
+ webUrl: repo.webUrl ?? undefined,
+ })),
+ stats,
+ }
+}
+
+// @note (2025-05-12): in zoekt, repositories are identified by the `RepositoryID` field
+// which corresponds to the `id` in the Repo table. In order to efficiently fetch repository
+// metadata when transforming (potentially thousands of) file matches, we aggregate a unique
+// set of repository ids* and map them to their corresponding Repo record.
+//
+// *Q: Why is `RepositoryID` optional? And why are we falling back to `Repository`?
+// A: Prior to this change, the repository id was not plumbed into zoekt, so RepositoryID was
+// always undefined. To make this a non-breaking change, we fall back to using the repository's name
+// (`Repository`) as the identifier in these cases. This is not guaranteed to be unique, but in
+// practice it is since the repository name includes the host and path (e.g., 'github.com/org/repo',
+// 'gitea.com/org/repo', etc.).
+//
+// Note: When a repository is re-indexed (every hour) this ID will be populated.
+// @see: https://github.com/sourcebot-dev/zoekt/pull/6
+const getRepoIdForFile = (file: ZoektGrpcFileMatch): string | number => {
+ return file.repository_id ?? file.repository;
+}
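// Illustrative only (the field values below are hypothetical): a repository that has been
// re-indexed since the zoekt change carries a numeric repository_id, while older shards
// fall back to the repository name.
const exampleNumericId = getRepoIdForFile({ repository_id: 123, repository: 'github.com/org/repo' } as ZoektGrpcFileMatch);   // -> 123
const exampleNameFallback = getRepoIdForFile({ repository: 'github.com/org/repo' } as ZoektGrpcFileMatch);                    // -> 'github.com/org/repo'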
+
+const createGrpcClient = (): WebserverServiceClient => {
+ // Path to proto files - these should match your monorepo structure
+ const protoBasePath = path.join(process.cwd(), '../../vendor/zoekt/grpc/protos');
+ const protoPath = path.join(protoBasePath, 'zoekt/webserver/v1/webserver.proto');
+
+ const packageDefinition = protoLoader.loadSync(protoPath, {
+ keepCase: true,
+ longs: Number,
+ enums: String,
+ defaults: true,
+ oneofs: true,
+ includeDirs: [protoBasePath],
+ });
+
+ const proto = grpc.loadPackageDefinition(packageDefinition) as unknown as ProtoGrpcType;
+
+ // Extract host and port from ZOEKT_WEBSERVER_URL
+ const zoektUrl = new URL(env.ZOEKT_WEBSERVER_URL);
+ const grpcAddress = `${zoektUrl.hostname}:${zoektUrl.port}`;
+
+ return new proto.zoekt.webserver.v1.WebserverService(
+ grpcAddress,
+ grpc.credentials.createInsecure(),
+ {
+ 'grpc.max_receive_message_length': 500 * 1024 * 1024, // 500MB
+ 'grpc.max_send_message_length': 500 * 1024 * 1024, // 500MB
+ }
+ );
+}
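// A rough usage sketch for the client created above (illustrative; `ZoektGrpcStreamSearchRequest`
// is a hypothetical alias for the generated StreamSearchRequest type and is not defined in this
// change). With @grpc/grpc-js, a server-streaming RPC returns a readable stream that emits one
// response per zoekt flush and supports cancel(), which is what the abort handling above relies on.
const exampleStreamSearch = (
    request: ZoektGrpcStreamSearchRequest,
    onChunk: (chunk: unknown) => void,
) => {
    const client = createGrpcClient();
    const stream = client.StreamSearch(request);
    stream.on('data', onChunk);               // one response chunk per flush
    stream.on('error', () => client.close()); // handle / propagate gRPC errors
    stream.on('end', () => client.close());   // all shards have flushed
    return () => stream.cancel();             // allows the caller to abort the upstream search
};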
+
+
+const accumulateStats = (a: SearchStats, b: SearchStats): SearchStats => {
+ return {
+ actualMatchCount: a.actualMatchCount + b.actualMatchCount,
+ totalMatchCount: a.totalMatchCount + b.totalMatchCount,
+ duration: a.duration + b.duration,
+ fileCount: a.fileCount + b.fileCount,
+ filesSkipped: a.filesSkipped + b.filesSkipped,
+ contentBytesLoaded: a.contentBytesLoaded + b.contentBytesLoaded,
+ indexBytesLoaded: a.indexBytesLoaded + b.indexBytesLoaded,
+ crashes: a.crashes + b.crashes,
+ shardFilesConsidered: a.shardFilesConsidered + b.shardFilesConsidered,
+ filesConsidered: a.filesConsidered + b.filesConsidered,
+ filesLoaded: a.filesLoaded + b.filesLoaded,
+ shardsScanned: a.shardsScanned + b.shardsScanned,
+ shardsSkipped: a.shardsSkipped + b.shardsSkipped,
+ shardsSkippedFilter: a.shardsSkippedFilter + b.shardsSkippedFilter,
+ ngramMatches: a.ngramMatches + b.ngramMatches,
+ ngramLookups: a.ngramLookups + b.ngramLookups,
+ wait: a.wait + b.wait,
+ matchTreeConstruction: a.matchTreeConstruction + b.matchTreeConstruction,
+ matchTreeSearch: a.matchTreeSearch + b.matchTreeSearch,
+ regexpsConsidered: a.regexpsConsidered + b.regexpsConsidered,
+ // Capture the first non-unknown flush reason.
+ ...(a.flushReason === ZoektGrpcFlushReason.FLUSH_REASON_UNKNOWN_UNSPECIFIED ? {
+ flushReason: b.flushReason
+ } : {
+ flushReason: a.flushReason,
+ }),
+ }
+}
\ No newline at end of file
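// Illustrative usage of accumulateStats: since zoekt streams results over several flushes,
// per-chunk stats can be folded into a running total (the zero-valued `initial` object is
// assumed to be supplied by the caller):
const exampleFoldStats = (chunkStats: SearchStats[], initial: SearchStats): SearchStats =>
    chunkStats.reduce(accumulateStats, initial);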
diff --git a/packages/web/src/lib/errorCodes.ts b/packages/web/src/lib/errorCodes.ts
index 5e545200e..fc2abbc0c 100644
--- a/packages/web/src/lib/errorCodes.ts
+++ b/packages/web/src/lib/errorCodes.ts
@@ -34,4 +34,5 @@ export enum ErrorCode {
API_KEY_NOT_FOUND = 'API_KEY_NOT_FOUND',
INVALID_API_KEY = 'INVALID_API_KEY',
CHAT_IS_READONLY = 'CHAT_IS_READONLY',
+ FAILED_TO_PARSE_QUERY = 'FAILED_TO_PARSE_QUERY',
}
diff --git a/packages/web/src/lib/extensions/searchResultHighlightExtension.ts b/packages/web/src/lib/extensions/searchResultHighlightExtension.ts
index 58364f56d..d3f746467 100644
--- a/packages/web/src/lib/extensions/searchResultHighlightExtension.ts
+++ b/packages/web/src/lib/extensions/searchResultHighlightExtension.ts
@@ -1,6 +1,6 @@
import { EditorSelection, Extension, StateEffect, StateField, Text, Transaction } from "@codemirror/state";
import { Decoration, DecorationSet, EditorView } from "@codemirror/view";
-import { SourceRange } from "@/features/search/types";
+import { SourceRange } from "@/features/search";
const setMatchState = StateEffect.define<{
selectedMatchIndex: number,
diff --git a/packages/web/src/lib/posthogEvents.ts b/packages/web/src/lib/posthogEvents.ts
index 9ed40fd96..126ecf45e 100644
--- a/packages/web/src/lib/posthogEvents.ts
+++ b/packages/web/src/lib/posthogEvents.ts
@@ -5,7 +5,10 @@ export type PosthogEventMap = {
contentBytesLoaded: number,
indexBytesLoaded: number,
crashes: number,
+ /** @deprecated: use timeToFirstSearchResultMs and timeToSearchCompletionMs instead */
durationMs: number,
+ timeToFirstSearchResultMs: number,
+ timeToSearchCompletionMs: number,
fileCount: number,
shardFilesConsidered: number,
filesConsidered: number,
@@ -22,8 +25,9 @@ export type PosthogEventMap = {
matchTreeConstruction: number,
matchTreeSearch: number,
regexpsConsidered: number,
- flushReason: number,
- fileLanguages: string[]
+ flushReason: string,
+ fileLanguages: string[],
+ isSearchExhaustive: boolean
},
share_link_created: {},
////////////////////////////////////////////////////////////////
diff --git a/packages/web/src/lib/types.ts b/packages/web/src/lib/types.ts
index e27e7057f..cb6dc3c2f 100644
--- a/packages/web/src/lib/types.ts
+++ b/packages/web/src/lib/types.ts
@@ -9,6 +9,8 @@ export type GetVersionResponse = z.infer;
export enum SearchQueryParams {
query = "query",
matches = "matches",
+ isRegexEnabled = "isRegexEnabled",
+ isCaseSensitivityEnabled = "isCaseSensitivityEnabled",
}
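// A hedged sketch (helper name and route are illustrative, not from this change) of how these
// query params could be assembled into a search URL:
const buildSearchUrl = (query: string, isRegexEnabled: boolean, isCaseSensitivityEnabled: boolean) => {
    const params = new URLSearchParams({
        [SearchQueryParams.query]: query,
        [SearchQueryParams.isRegexEnabled]: String(isRegexEnabled),
        [SearchQueryParams.isCaseSensitivityEnabled]: String(isCaseSensitivityEnabled),
    });
    return `/search?${params.toString()}`;
};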
export type ApiKeyPayload = {
diff --git a/packages/web/src/lib/utils.ts b/packages/web/src/lib/utils.ts
index 0d76eb3be..ea229977f 100644
--- a/packages/web/src/lib/utils.ts
+++ b/packages/web/src/lib/utils.ts
@@ -376,6 +376,42 @@ export const getCodeHostBrowseAtBranchUrl = ({
}
}
+export const getCodeHostBrowseFileAtBranchUrl = ({
+ webUrl,
+ codeHostType,
+ branchName,
+ filePath,
+}: {
+ webUrl?: string | null,
+ codeHostType: CodeHostType,
+ branchName: string,
+ filePath: string,
+}) => {
+ if (!webUrl) {
+ return undefined;
+ }
+
+ switch (codeHostType) {
+ case 'github':
+ return `${webUrl}/blob/${branchName}/${filePath}`;
+ case 'gitlab':
+ return `${webUrl}/-/blob/${branchName}/${filePath}`;
+ case 'gitea':
+ return `${webUrl}/src/branch/${branchName}/${filePath}`;
+ case 'azuredevops':
+ return `${webUrl}?path=${filePath}&version=${branchName}`;
+ case 'bitbucketCloud':
+ return `${webUrl}/src/${branchName}/${filePath}`;
+ case 'bitbucketServer':
+ return `${webUrl}/browse/${filePath}?at=${branchName}`;
+ case 'gerrit':
+ return `${webUrl}/+/${branchName}/${filePath}`;
+ case 'genericGitHost':
+ return undefined;
+
+ }
+}
+
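// Illustrative expected output for the helper above (repository values are hypothetical):
const exampleGitHubFileUrl = getCodeHostBrowseFileAtBranchUrl({
    webUrl: 'https://github.com/org/repo',
    codeHostType: 'github',
    branchName: 'main',
    filePath: 'src/index.ts',
});
// -> 'https://github.com/org/repo/blob/main/src/index.ts'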
export const isAuthSupportedForCodeHost = (codeHostType: CodeHostType): boolean => {
switch (codeHostType) {
case "github":
diff --git a/packages/web/src/prisma.ts b/packages/web/src/prisma.ts
index 0d520de78..1de13668a 100644
--- a/packages/web/src/prisma.ts
+++ b/packages/web/src/prisma.ts
@@ -1,6 +1,6 @@
import 'server-only';
import { env, getDBConnectionString } from "@sourcebot/shared";
-import { Prisma, PrismaClient } from "@sourcebot/db";
+import { Prisma, PrismaClient, UserWithAccounts } from "@sourcebot/db";
import { hasEntitlement } from "@sourcebot/shared";
// @see: https://authjs.dev/getting-started/adapters/prisma
@@ -24,7 +24,7 @@ export const prisma = globalForPrisma.prisma || new PrismaClient({
url: dbConnectionString,
},
}
- }: {}),
+ } : {}),
})
if (env.NODE_ENV !== "production") globalForPrisma.prisma = prisma
@@ -32,7 +32,7 @@ if (env.NODE_ENV !== "production") globalForPrisma.prisma = prisma
 * Creates a prisma client extension that scopes queries strictly to information
* a given user should be able to access.
*/
-export const userScopedPrismaClientExtension = (accountIds?: string[]) => {
+export const userScopedPrismaClientExtension = (user?: UserWithAccounts) => {
return Prisma.defineExtension(
(prisma) => {
return prisma.$extends({
@@ -46,24 +46,7 @@ export const userScopedPrismaClientExtension = (accountIds?: string[]) => {
argsWithWhere.where = {
...(argsWithWhere.where || {}),
- OR: [
- // Only include repos that are permitted to the user
- ...(accountIds ? [
- {
- permittedAccounts: {
- some: {
- accountId: {
- in: accountIds,
- }
- }
- }
- },
- ] : []),
- // or are public.
- {
- isPublic: true,
- }
- ]
+ ...getRepoPermissionFilterForUser(user),
};
return query(args);
@@ -74,3 +57,29 @@ export const userScopedPrismaClientExtension = (accountIds?: string[]) => {
})
})
}
+
+/**
+ * Returns a filter for repositories that the user has access to.
+ */
+export const getRepoPermissionFilterForUser = (user?: UserWithAccounts): Prisma.RepoWhereInput => {
+ return {
+ OR: [
+ // Only include repos that are permitted to the user
+ ...((user && user.accounts.length > 0) ? [
+ {
+ permittedAccounts: {
+ some: {
+ accountId: {
+ in: user.accounts.map(account => account.id),
+ }
+ }
+ }
+ },
+ ] : []),
+ // or are public.
+ {
+ isPublic: true,
+ }
+ ]
+ }
+}
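// A hedged sketch of reusing this filter outside the client extension, e.g. when listing
// repositories directly (function name is illustrative):
const listVisibleRepos = async (user?: UserWithAccounts) =>
    prisma.repo.findMany({
        where: getRepoPermissionFilterForUser(user),
    });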
diff --git a/packages/web/src/proto/google/protobuf/Duration.ts b/packages/web/src/proto/google/protobuf/Duration.ts
new file mode 100644
index 000000000..f24c347f3
--- /dev/null
+++ b/packages/web/src/proto/google/protobuf/Duration.ts
@@ -0,0 +1,13 @@
+// Original file: null
+
+import type { Long } from '@grpc/proto-loader';
+
+export interface Duration {
+ 'seconds'?: (number | string | Long);
+ 'nanos'?: (number);
+}
+
+export interface Duration__Output {
+ 'seconds': (number);
+ 'nanos': (number);
+}
diff --git a/packages/web/src/proto/google/protobuf/Timestamp.ts b/packages/web/src/proto/google/protobuf/Timestamp.ts
new file mode 100644
index 000000000..154167e2c
--- /dev/null
+++ b/packages/web/src/proto/google/protobuf/Timestamp.ts
@@ -0,0 +1,13 @@
+// Original file: null
+
+import type { Long } from '@grpc/proto-loader';
+
+export interface Timestamp {
+ 'seconds'?: (number | string | Long);
+ 'nanos'?: (number);
+}
+
+export interface Timestamp__Output {
+ 'seconds': (number);
+ 'nanos': (number);
+}
diff --git a/packages/web/src/proto/query.ts b/packages/web/src/proto/query.ts
new file mode 100644
index 000000000..93631b521
--- /dev/null
+++ b/packages/web/src/proto/query.ts
@@ -0,0 +1,55 @@
+import type * as grpc from '@grpc/grpc-js';
+import type { MessageTypeDefinition } from '@grpc/proto-loader';
+
+import type { And as _zoekt_webserver_v1_And, And__Output as _zoekt_webserver_v1_And__Output } from './zoekt/webserver/v1/And';
+import type { Boost as _zoekt_webserver_v1_Boost, Boost__Output as _zoekt_webserver_v1_Boost__Output } from './zoekt/webserver/v1/Boost';
+import type { Branch as _zoekt_webserver_v1_Branch, Branch__Output as _zoekt_webserver_v1_Branch__Output } from './zoekt/webserver/v1/Branch';
+import type { BranchRepos as _zoekt_webserver_v1_BranchRepos, BranchRepos__Output as _zoekt_webserver_v1_BranchRepos__Output } from './zoekt/webserver/v1/BranchRepos';
+import type { BranchesRepos as _zoekt_webserver_v1_BranchesRepos, BranchesRepos__Output as _zoekt_webserver_v1_BranchesRepos__Output } from './zoekt/webserver/v1/BranchesRepos';
+import type { FileNameSet as _zoekt_webserver_v1_FileNameSet, FileNameSet__Output as _zoekt_webserver_v1_FileNameSet__Output } from './zoekt/webserver/v1/FileNameSet';
+import type { Language as _zoekt_webserver_v1_Language, Language__Output as _zoekt_webserver_v1_Language__Output } from './zoekt/webserver/v1/Language';
+import type { Not as _zoekt_webserver_v1_Not, Not__Output as _zoekt_webserver_v1_Not__Output } from './zoekt/webserver/v1/Not';
+import type { Or as _zoekt_webserver_v1_Or, Or__Output as _zoekt_webserver_v1_Or__Output } from './zoekt/webserver/v1/Or';
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from './zoekt/webserver/v1/Q';
+import type { RawConfig as _zoekt_webserver_v1_RawConfig, RawConfig__Output as _zoekt_webserver_v1_RawConfig__Output } from './zoekt/webserver/v1/RawConfig';
+import type { Regexp as _zoekt_webserver_v1_Regexp, Regexp__Output as _zoekt_webserver_v1_Regexp__Output } from './zoekt/webserver/v1/Regexp';
+import type { Repo as _zoekt_webserver_v1_Repo, Repo__Output as _zoekt_webserver_v1_Repo__Output } from './zoekt/webserver/v1/Repo';
+import type { RepoIds as _zoekt_webserver_v1_RepoIds, RepoIds__Output as _zoekt_webserver_v1_RepoIds__Output } from './zoekt/webserver/v1/RepoIds';
+import type { RepoRegexp as _zoekt_webserver_v1_RepoRegexp, RepoRegexp__Output as _zoekt_webserver_v1_RepoRegexp__Output } from './zoekt/webserver/v1/RepoRegexp';
+import type { RepoSet as _zoekt_webserver_v1_RepoSet, RepoSet__Output as _zoekt_webserver_v1_RepoSet__Output } from './zoekt/webserver/v1/RepoSet';
+import type { Substring as _zoekt_webserver_v1_Substring, Substring__Output as _zoekt_webserver_v1_Substring__Output } from './zoekt/webserver/v1/Substring';
+import type { Symbol as _zoekt_webserver_v1_Symbol, Symbol__Output as _zoekt_webserver_v1_Symbol__Output } from './zoekt/webserver/v1/Symbol';
+import type { Type as _zoekt_webserver_v1_Type, Type__Output as _zoekt_webserver_v1_Type__Output } from './zoekt/webserver/v1/Type';
+
+type SubtypeConstructor<Constructor extends new (...args: any) => any, Subtype> = {
+ new(...args: ConstructorParameters<Constructor>): Subtype;
+};
+
+export interface ProtoGrpcType {
+ zoekt: {
+ webserver: {
+ v1: {
+ And: MessageTypeDefinition<_zoekt_webserver_v1_And, _zoekt_webserver_v1_And__Output>
+ Boost: MessageTypeDefinition<_zoekt_webserver_v1_Boost, _zoekt_webserver_v1_Boost__Output>
+ Branch: MessageTypeDefinition<_zoekt_webserver_v1_Branch, _zoekt_webserver_v1_Branch__Output>
+ BranchRepos: MessageTypeDefinition<_zoekt_webserver_v1_BranchRepos, _zoekt_webserver_v1_BranchRepos__Output>
+ BranchesRepos: MessageTypeDefinition<_zoekt_webserver_v1_BranchesRepos, _zoekt_webserver_v1_BranchesRepos__Output>
+ FileNameSet: MessageTypeDefinition<_zoekt_webserver_v1_FileNameSet, _zoekt_webserver_v1_FileNameSet__Output>
+ Language: MessageTypeDefinition<_zoekt_webserver_v1_Language, _zoekt_webserver_v1_Language__Output>
+ Not: MessageTypeDefinition<_zoekt_webserver_v1_Not, _zoekt_webserver_v1_Not__Output>
+ Or: MessageTypeDefinition<_zoekt_webserver_v1_Or, _zoekt_webserver_v1_Or__Output>
+ Q: MessageTypeDefinition<_zoekt_webserver_v1_Q, _zoekt_webserver_v1_Q__Output>
+ RawConfig: MessageTypeDefinition<_zoekt_webserver_v1_RawConfig, _zoekt_webserver_v1_RawConfig__Output>
+ Regexp: MessageTypeDefinition<_zoekt_webserver_v1_Regexp, _zoekt_webserver_v1_Regexp__Output>
+ Repo: MessageTypeDefinition<_zoekt_webserver_v1_Repo, _zoekt_webserver_v1_Repo__Output>
+ RepoIds: MessageTypeDefinition<_zoekt_webserver_v1_RepoIds, _zoekt_webserver_v1_RepoIds__Output>
+ RepoRegexp: MessageTypeDefinition<_zoekt_webserver_v1_RepoRegexp, _zoekt_webserver_v1_RepoRegexp__Output>
+ RepoSet: MessageTypeDefinition<_zoekt_webserver_v1_RepoSet, _zoekt_webserver_v1_RepoSet__Output>
+ Substring: MessageTypeDefinition<_zoekt_webserver_v1_Substring, _zoekt_webserver_v1_Substring__Output>
+ Symbol: MessageTypeDefinition<_zoekt_webserver_v1_Symbol, _zoekt_webserver_v1_Symbol__Output>
+ Type: MessageTypeDefinition<_zoekt_webserver_v1_Type, _zoekt_webserver_v1_Type__Output>
+ }
+ }
+ }
+}
+
diff --git a/packages/web/src/proto/webserver.ts b/packages/web/src/proto/webserver.ts
new file mode 100644
index 000000000..0dc98637e
--- /dev/null
+++ b/packages/web/src/proto/webserver.ts
@@ -0,0 +1,112 @@
+import type * as grpc from '@grpc/grpc-js';
+import type { EnumTypeDefinition, MessageTypeDefinition } from '@grpc/proto-loader';
+
+import type { Duration as _google_protobuf_Duration, Duration__Output as _google_protobuf_Duration__Output } from './google/protobuf/Duration';
+import type { Timestamp as _google_protobuf_Timestamp, Timestamp__Output as _google_protobuf_Timestamp__Output } from './google/protobuf/Timestamp';
+import type { And as _zoekt_webserver_v1_And, And__Output as _zoekt_webserver_v1_And__Output } from './zoekt/webserver/v1/And';
+import type { Boost as _zoekt_webserver_v1_Boost, Boost__Output as _zoekt_webserver_v1_Boost__Output } from './zoekt/webserver/v1/Boost';
+import type { Branch as _zoekt_webserver_v1_Branch, Branch__Output as _zoekt_webserver_v1_Branch__Output } from './zoekt/webserver/v1/Branch';
+import type { BranchRepos as _zoekt_webserver_v1_BranchRepos, BranchRepos__Output as _zoekt_webserver_v1_BranchRepos__Output } from './zoekt/webserver/v1/BranchRepos';
+import type { BranchesRepos as _zoekt_webserver_v1_BranchesRepos, BranchesRepos__Output as _zoekt_webserver_v1_BranchesRepos__Output } from './zoekt/webserver/v1/BranchesRepos';
+import type { ChunkMatch as _zoekt_webserver_v1_ChunkMatch, ChunkMatch__Output as _zoekt_webserver_v1_ChunkMatch__Output } from './zoekt/webserver/v1/ChunkMatch';
+import type { FileMatch as _zoekt_webserver_v1_FileMatch, FileMatch__Output as _zoekt_webserver_v1_FileMatch__Output } from './zoekt/webserver/v1/FileMatch';
+import type { FileNameSet as _zoekt_webserver_v1_FileNameSet, FileNameSet__Output as _zoekt_webserver_v1_FileNameSet__Output } from './zoekt/webserver/v1/FileNameSet';
+import type { IndexMetadata as _zoekt_webserver_v1_IndexMetadata, IndexMetadata__Output as _zoekt_webserver_v1_IndexMetadata__Output } from './zoekt/webserver/v1/IndexMetadata';
+import type { Language as _zoekt_webserver_v1_Language, Language__Output as _zoekt_webserver_v1_Language__Output } from './zoekt/webserver/v1/Language';
+import type { LineFragmentMatch as _zoekt_webserver_v1_LineFragmentMatch, LineFragmentMatch__Output as _zoekt_webserver_v1_LineFragmentMatch__Output } from './zoekt/webserver/v1/LineFragmentMatch';
+import type { LineMatch as _zoekt_webserver_v1_LineMatch, LineMatch__Output as _zoekt_webserver_v1_LineMatch__Output } from './zoekt/webserver/v1/LineMatch';
+import type { ListOptions as _zoekt_webserver_v1_ListOptions, ListOptions__Output as _zoekt_webserver_v1_ListOptions__Output } from './zoekt/webserver/v1/ListOptions';
+import type { ListRequest as _zoekt_webserver_v1_ListRequest, ListRequest__Output as _zoekt_webserver_v1_ListRequest__Output } from './zoekt/webserver/v1/ListRequest';
+import type { ListResponse as _zoekt_webserver_v1_ListResponse, ListResponse__Output as _zoekt_webserver_v1_ListResponse__Output } from './zoekt/webserver/v1/ListResponse';
+import type { Location as _zoekt_webserver_v1_Location, Location__Output as _zoekt_webserver_v1_Location__Output } from './zoekt/webserver/v1/Location';
+import type { MinimalRepoListEntry as _zoekt_webserver_v1_MinimalRepoListEntry, MinimalRepoListEntry__Output as _zoekt_webserver_v1_MinimalRepoListEntry__Output } from './zoekt/webserver/v1/MinimalRepoListEntry';
+import type { Not as _zoekt_webserver_v1_Not, Not__Output as _zoekt_webserver_v1_Not__Output } from './zoekt/webserver/v1/Not';
+import type { Or as _zoekt_webserver_v1_Or, Or__Output as _zoekt_webserver_v1_Or__Output } from './zoekt/webserver/v1/Or';
+import type { Progress as _zoekt_webserver_v1_Progress, Progress__Output as _zoekt_webserver_v1_Progress__Output } from './zoekt/webserver/v1/Progress';
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from './zoekt/webserver/v1/Q';
+import type { Range as _zoekt_webserver_v1_Range, Range__Output as _zoekt_webserver_v1_Range__Output } from './zoekt/webserver/v1/Range';
+import type { RawConfig as _zoekt_webserver_v1_RawConfig, RawConfig__Output as _zoekt_webserver_v1_RawConfig__Output } from './zoekt/webserver/v1/RawConfig';
+import type { Regexp as _zoekt_webserver_v1_Regexp, Regexp__Output as _zoekt_webserver_v1_Regexp__Output } from './zoekt/webserver/v1/Regexp';
+import type { Repo as _zoekt_webserver_v1_Repo, Repo__Output as _zoekt_webserver_v1_Repo__Output } from './zoekt/webserver/v1/Repo';
+import type { RepoIds as _zoekt_webserver_v1_RepoIds, RepoIds__Output as _zoekt_webserver_v1_RepoIds__Output } from './zoekt/webserver/v1/RepoIds';
+import type { RepoListEntry as _zoekt_webserver_v1_RepoListEntry, RepoListEntry__Output as _zoekt_webserver_v1_RepoListEntry__Output } from './zoekt/webserver/v1/RepoListEntry';
+import type { RepoRegexp as _zoekt_webserver_v1_RepoRegexp, RepoRegexp__Output as _zoekt_webserver_v1_RepoRegexp__Output } from './zoekt/webserver/v1/RepoRegexp';
+import type { RepoSet as _zoekt_webserver_v1_RepoSet, RepoSet__Output as _zoekt_webserver_v1_RepoSet__Output } from './zoekt/webserver/v1/RepoSet';
+import type { RepoStats as _zoekt_webserver_v1_RepoStats, RepoStats__Output as _zoekt_webserver_v1_RepoStats__Output } from './zoekt/webserver/v1/RepoStats';
+import type { Repository as _zoekt_webserver_v1_Repository, Repository__Output as _zoekt_webserver_v1_Repository__Output } from './zoekt/webserver/v1/Repository';
+import type { RepositoryBranch as _zoekt_webserver_v1_RepositoryBranch, RepositoryBranch__Output as _zoekt_webserver_v1_RepositoryBranch__Output } from './zoekt/webserver/v1/RepositoryBranch';
+import type { SearchOptions as _zoekt_webserver_v1_SearchOptions, SearchOptions__Output as _zoekt_webserver_v1_SearchOptions__Output } from './zoekt/webserver/v1/SearchOptions';
+import type { SearchRequest as _zoekt_webserver_v1_SearchRequest, SearchRequest__Output as _zoekt_webserver_v1_SearchRequest__Output } from './zoekt/webserver/v1/SearchRequest';
+import type { SearchResponse as _zoekt_webserver_v1_SearchResponse, SearchResponse__Output as _zoekt_webserver_v1_SearchResponse__Output } from './zoekt/webserver/v1/SearchResponse';
+import type { Stats as _zoekt_webserver_v1_Stats, Stats__Output as _zoekt_webserver_v1_Stats__Output } from './zoekt/webserver/v1/Stats';
+import type { StreamSearchRequest as _zoekt_webserver_v1_StreamSearchRequest, StreamSearchRequest__Output as _zoekt_webserver_v1_StreamSearchRequest__Output } from './zoekt/webserver/v1/StreamSearchRequest';
+import type { StreamSearchResponse as _zoekt_webserver_v1_StreamSearchResponse, StreamSearchResponse__Output as _zoekt_webserver_v1_StreamSearchResponse__Output } from './zoekt/webserver/v1/StreamSearchResponse';
+import type { Substring as _zoekt_webserver_v1_Substring, Substring__Output as _zoekt_webserver_v1_Substring__Output } from './zoekt/webserver/v1/Substring';
+import type { Symbol as _zoekt_webserver_v1_Symbol, Symbol__Output as _zoekt_webserver_v1_Symbol__Output } from './zoekt/webserver/v1/Symbol';
+import type { SymbolInfo as _zoekt_webserver_v1_SymbolInfo, SymbolInfo__Output as _zoekt_webserver_v1_SymbolInfo__Output } from './zoekt/webserver/v1/SymbolInfo';
+import type { Type as _zoekt_webserver_v1_Type, Type__Output as _zoekt_webserver_v1_Type__Output } from './zoekt/webserver/v1/Type';
+import type { WebserverServiceClient as _zoekt_webserver_v1_WebserverServiceClient, WebserverServiceDefinition as _zoekt_webserver_v1_WebserverServiceDefinition } from './zoekt/webserver/v1/WebserverService';
+
+type SubtypeConstructor<Constructor extends new (...args: any) => any, Subtype> = {
+ new(...args: ConstructorParameters<Constructor>): Subtype;
+};
+
+export interface ProtoGrpcType {
+ google: {
+ protobuf: {
+ Duration: MessageTypeDefinition<_google_protobuf_Duration, _google_protobuf_Duration__Output>
+ Timestamp: MessageTypeDefinition<_google_protobuf_Timestamp, _google_protobuf_Timestamp__Output>
+ }
+ }
+ zoekt: {
+ webserver: {
+ v1: {
+ And: MessageTypeDefinition<_zoekt_webserver_v1_And, _zoekt_webserver_v1_And__Output>
+ Boost: MessageTypeDefinition<_zoekt_webserver_v1_Boost, _zoekt_webserver_v1_Boost__Output>
+ Branch: MessageTypeDefinition<_zoekt_webserver_v1_Branch, _zoekt_webserver_v1_Branch__Output>
+ BranchRepos: MessageTypeDefinition<_zoekt_webserver_v1_BranchRepos, _zoekt_webserver_v1_BranchRepos__Output>
+ BranchesRepos: MessageTypeDefinition<_zoekt_webserver_v1_BranchesRepos, _zoekt_webserver_v1_BranchesRepos__Output>
+ ChunkMatch: MessageTypeDefinition<_zoekt_webserver_v1_ChunkMatch, _zoekt_webserver_v1_ChunkMatch__Output>
+ FileMatch: MessageTypeDefinition<_zoekt_webserver_v1_FileMatch, _zoekt_webserver_v1_FileMatch__Output>
+ FileNameSet: MessageTypeDefinition<_zoekt_webserver_v1_FileNameSet, _zoekt_webserver_v1_FileNameSet__Output>
+ FlushReason: EnumTypeDefinition
+ IndexMetadata: MessageTypeDefinition<_zoekt_webserver_v1_IndexMetadata, _zoekt_webserver_v1_IndexMetadata__Output>
+ Language: MessageTypeDefinition<_zoekt_webserver_v1_Language, _zoekt_webserver_v1_Language__Output>
+ LineFragmentMatch: MessageTypeDefinition<_zoekt_webserver_v1_LineFragmentMatch, _zoekt_webserver_v1_LineFragmentMatch__Output>
+ LineMatch: MessageTypeDefinition<_zoekt_webserver_v1_LineMatch, _zoekt_webserver_v1_LineMatch__Output>
+ ListOptions: MessageTypeDefinition<_zoekt_webserver_v1_ListOptions, _zoekt_webserver_v1_ListOptions__Output>
+ ListRequest: MessageTypeDefinition<_zoekt_webserver_v1_ListRequest, _zoekt_webserver_v1_ListRequest__Output>
+ ListResponse: MessageTypeDefinition<_zoekt_webserver_v1_ListResponse, _zoekt_webserver_v1_ListResponse__Output>
+ Location: MessageTypeDefinition<_zoekt_webserver_v1_Location, _zoekt_webserver_v1_Location__Output>
+ MinimalRepoListEntry: MessageTypeDefinition<_zoekt_webserver_v1_MinimalRepoListEntry, _zoekt_webserver_v1_MinimalRepoListEntry__Output>
+ Not: MessageTypeDefinition<_zoekt_webserver_v1_Not, _zoekt_webserver_v1_Not__Output>
+ Or: MessageTypeDefinition<_zoekt_webserver_v1_Or, _zoekt_webserver_v1_Or__Output>
+ Progress: MessageTypeDefinition<_zoekt_webserver_v1_Progress, _zoekt_webserver_v1_Progress__Output>
+ Q: MessageTypeDefinition<_zoekt_webserver_v1_Q, _zoekt_webserver_v1_Q__Output>
+ Range: MessageTypeDefinition<_zoekt_webserver_v1_Range, _zoekt_webserver_v1_Range__Output>
+ RawConfig: MessageTypeDefinition<_zoekt_webserver_v1_RawConfig, _zoekt_webserver_v1_RawConfig__Output>
+ Regexp: MessageTypeDefinition<_zoekt_webserver_v1_Regexp, _zoekt_webserver_v1_Regexp__Output>
+ Repo: MessageTypeDefinition<_zoekt_webserver_v1_Repo, _zoekt_webserver_v1_Repo__Output>
+ RepoIds: MessageTypeDefinition<_zoekt_webserver_v1_RepoIds, _zoekt_webserver_v1_RepoIds__Output>
+ RepoListEntry: MessageTypeDefinition<_zoekt_webserver_v1_RepoListEntry, _zoekt_webserver_v1_RepoListEntry__Output>
+ RepoRegexp: MessageTypeDefinition<_zoekt_webserver_v1_RepoRegexp, _zoekt_webserver_v1_RepoRegexp__Output>
+ RepoSet: MessageTypeDefinition<_zoekt_webserver_v1_RepoSet, _zoekt_webserver_v1_RepoSet__Output>
+ RepoStats: MessageTypeDefinition<_zoekt_webserver_v1_RepoStats, _zoekt_webserver_v1_RepoStats__Output>
+ Repository: MessageTypeDefinition<_zoekt_webserver_v1_Repository, _zoekt_webserver_v1_Repository__Output>
+ RepositoryBranch: MessageTypeDefinition<_zoekt_webserver_v1_RepositoryBranch, _zoekt_webserver_v1_RepositoryBranch__Output>
+ SearchOptions: MessageTypeDefinition<_zoekt_webserver_v1_SearchOptions, _zoekt_webserver_v1_SearchOptions__Output>
+ SearchRequest: MessageTypeDefinition<_zoekt_webserver_v1_SearchRequest, _zoekt_webserver_v1_SearchRequest__Output>
+ SearchResponse: MessageTypeDefinition<_zoekt_webserver_v1_SearchResponse, _zoekt_webserver_v1_SearchResponse__Output>
+ Stats: MessageTypeDefinition<_zoekt_webserver_v1_Stats, _zoekt_webserver_v1_Stats__Output>
+ StreamSearchRequest: MessageTypeDefinition<_zoekt_webserver_v1_StreamSearchRequest, _zoekt_webserver_v1_StreamSearchRequest__Output>
+ StreamSearchResponse: MessageTypeDefinition<_zoekt_webserver_v1_StreamSearchResponse, _zoekt_webserver_v1_StreamSearchResponse__Output>
+ Substring: MessageTypeDefinition<_zoekt_webserver_v1_Substring, _zoekt_webserver_v1_Substring__Output>
+ Symbol: MessageTypeDefinition<_zoekt_webserver_v1_Symbol, _zoekt_webserver_v1_Symbol__Output>
+ SymbolInfo: MessageTypeDefinition<_zoekt_webserver_v1_SymbolInfo, _zoekt_webserver_v1_SymbolInfo__Output>
+ Type: MessageTypeDefinition<_zoekt_webserver_v1_Type, _zoekt_webserver_v1_Type__Output>
+ WebserverService: SubtypeConstructor<typeof grpc.Client, _zoekt_webserver_v1_WebserverServiceClient> & { service: _zoekt_webserver_v1_WebserverServiceDefinition }
+ }
+ }
+ }
+}
+
diff --git a/packages/web/src/proto/zoekt/webserver/v1/And.ts b/packages/web/src/proto/zoekt/webserver/v1/And.ts
new file mode 100644
index 000000000..61fff3c43
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/And.ts
@@ -0,0 +1,17 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from '../../../zoekt/webserver/v1/Q';
+
+/**
+ * And is matched when all its children are.
+ */
+export interface And {
+ 'children'?: (_zoekt_webserver_v1_Q)[];
+}
+
+/**
+ * And is matched when all its children are.
+ */
+export interface And__Output {
+ 'children': (_zoekt_webserver_v1_Q__Output)[];
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Boost.ts b/packages/web/src/proto/zoekt/webserver/v1/Boost.ts
new file mode 100644
index 000000000..d5f03a808
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Boost.ts
@@ -0,0 +1,19 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from '../../../zoekt/webserver/v1/Q';
+
+/**
+ * Boost multiplies the score of its child by the boost factor.
+ */
+export interface Boost {
+ 'child'?: (_zoekt_webserver_v1_Q | null);
+ 'boost'?: (number | string);
+}
+
+/**
+ * Boost multiplies the score of its child by the boost factor.
+ */
+export interface Boost__Output {
+ 'child': (_zoekt_webserver_v1_Q__Output | null);
+ 'boost': (number);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Branch.ts b/packages/web/src/proto/zoekt/webserver/v1/Branch.ts
new file mode 100644
index 000000000..6b7afce8d
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Branch.ts
@@ -0,0 +1,24 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+/**
+ * Branch limits search to a specific branch.
+ */
+export interface Branch {
+ 'pattern'?: (string);
+ /**
+ * exact is true if we want Pattern to equal branch.
+ */
+ 'exact'?: (boolean);
+}
+
+/**
+ * Branch limits search to a specific branch.
+ */
+export interface Branch__Output {
+ 'pattern': (string);
+ /**
+ * exact is true if we want Pattern to equal branch.
+ */
+ 'exact': (boolean);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/BranchRepos.ts b/packages/web/src/proto/zoekt/webserver/v1/BranchRepos.ts
new file mode 100644
index 000000000..7badc6d18
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/BranchRepos.ts
@@ -0,0 +1,26 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+/**
+ * BranchRepos is a (branch, sourcegraph repo ids bitmap) tuple. It is a
+ * Sourcegraph addition.
+ */
+export interface BranchRepos {
+ 'branch'?: (string);
+ /**
+ * a serialized roaring bitmap of the target repo ids
+ */
+ 'repos'?: (Buffer | Uint8Array | string);
+}
+
+/**
+ * BranchRepos is a (branch, sourcegraph repo ids bitmap) tuple. It is a
+ * Sourcegraph addition.
+ */
+export interface BranchRepos__Output {
+ 'branch': (string);
+ /**
+ * a serialized roaring bitmap of the target repo ids
+ */
+ 'repos': (Buffer);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/BranchesRepos.ts b/packages/web/src/proto/zoekt/webserver/v1/BranchesRepos.ts
new file mode 100644
index 000000000..b22b5a086
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/BranchesRepos.ts
@@ -0,0 +1,17 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+import type { BranchRepos as _zoekt_webserver_v1_BranchRepos, BranchRepos__Output as _zoekt_webserver_v1_BranchRepos__Output } from '../../../zoekt/webserver/v1/BranchRepos';
+
+/**
+ * BranchesRepos is a slice of BranchRepos to match.
+ */
+export interface BranchesRepos {
+ 'list'?: (_zoekt_webserver_v1_BranchRepos)[];
+}
+
+/**
+ * BranchesRepos is a slice of BranchRepos to match.
+ */
+export interface BranchesRepos__Output {
+ 'list': (_zoekt_webserver_v1_BranchRepos__Output)[];
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/ChunkMatch.ts b/packages/web/src/proto/zoekt/webserver/v1/ChunkMatch.ts
new file mode 100644
index 000000000..66d47099b
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/ChunkMatch.ts
@@ -0,0 +1,67 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Location as _zoekt_webserver_v1_Location, Location__Output as _zoekt_webserver_v1_Location__Output } from '../../../zoekt/webserver/v1/Location';
+import type { Range as _zoekt_webserver_v1_Range, Range__Output as _zoekt_webserver_v1_Range__Output } from '../../../zoekt/webserver/v1/Range';
+import type { SymbolInfo as _zoekt_webserver_v1_SymbolInfo, SymbolInfo__Output as _zoekt_webserver_v1_SymbolInfo__Output } from '../../../zoekt/webserver/v1/SymbolInfo';
+
+export interface ChunkMatch {
+ /**
+ * A contiguous range of complete lines that fully contains Ranges.
+ */
+ 'content'?: (Buffer | Uint8Array | string);
+ /**
+ * The location (inclusive) of the beginning of content
+ * relative to the beginning of the file. It will always be at the
+ * beginning of a line (Column will always be 1).
+ */
+ 'content_start'?: (_zoekt_webserver_v1_Location | null);
+ /**
+ * True if this match is a match on the file name, in
+ * which case Content will contain the file name.
+ */
+ 'file_name'?: (boolean);
+ /**
+ * A set of matching ranges within this chunk. Each range is relative
+ * to the beginning of the file (not the beginning of Content).
+ */
+ 'ranges'?: (_zoekt_webserver_v1_Range)[];
+ /**
+ * The symbol information associated with Ranges. If it is non-nil,
+ * its length will equal that of Ranges. Any of its elements may be nil.
+ */
+ 'symbol_info'?: (_zoekt_webserver_v1_SymbolInfo)[];
+ 'score'?: (number | string);
+ 'debug_score'?: (string);
+ 'best_line_match'?: (number);
+}
+
+export interface ChunkMatch__Output {
+ /**
+ * A contiguous range of complete lines that fully contains Ranges.
+ */
+ 'content': (Buffer);
+ /**
+ * The location (inclusive) of the beginning of content
+ * relative to the beginning of the file. It will always be at the
+ * beginning of a line (Column will always be 1).
+ */
+ 'content_start': (_zoekt_webserver_v1_Location__Output | null);
+ /**
+ * True if this match is a match on the file name, in
+ * which case Content will contain the file name.
+ */
+ 'file_name': (boolean);
+ /**
+ * A set of matching ranges within this chunk. Each range is relative
+ * to the beginning of the file (not the beginning of Content).
+ */
+ 'ranges': (_zoekt_webserver_v1_Range__Output)[];
+ /**
+ * The symbol information associated with Ranges. If it is non-nil,
+ * its length will equal that of Ranges. Any of its elements may be nil.
+ */
+ 'symbol_info': (_zoekt_webserver_v1_SymbolInfo__Output)[];
+ 'score': (number);
+ 'debug_score': (string);
+ 'best_line_match': (number);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/FileMatch.ts b/packages/web/src/proto/zoekt/webserver/v1/FileMatch.ts
new file mode 100644
index 000000000..7150fbc5b
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/FileMatch.ts
@@ -0,0 +1,132 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { LineMatch as _zoekt_webserver_v1_LineMatch, LineMatch__Output as _zoekt_webserver_v1_LineMatch__Output } from '../../../zoekt/webserver/v1/LineMatch';
+import type { ChunkMatch as _zoekt_webserver_v1_ChunkMatch, ChunkMatch__Output as _zoekt_webserver_v1_ChunkMatch__Output } from '../../../zoekt/webserver/v1/ChunkMatch';
+
+/**
+ * FileMatch contains all the matches within a file.
+ */
+export interface FileMatch {
+ /**
+ * Ranking; the higher, the better.
+ */
+ 'score'?: (number | string);
+ /**
+ * For debugging. Needs DebugScore set, but public so tests in
+ * other packages can print some diagnostics.
+ */
+ 'debug'?: (string);
+ /**
+ * The repository-relative path to the file.
+ * 🚨 Warning: file_name might not be a valid UTF-8 string.
+ */
+ 'file_name'?: (Buffer | Uint8Array | string);
+ /**
+ * Repository is the globally unique name of the repo of the
+ * match
+ */
+ 'repository'?: (string);
+ 'branches'?: (string)[];
+ /**
+ * One of line_matches or chunk_matches will be returned depending on whether
+ * the SearchOptions.ChunkMatches is set.
+ */
+ 'line_matches'?: (_zoekt_webserver_v1_LineMatch)[];
+ 'chunk_matches'?: (_zoekt_webserver_v1_ChunkMatch)[];
+ /**
+ * repository_id is a Sourcegraph extension. This is the ID of Repository in
+ * Sourcegraph.
+ */
+ 'repository_id'?: (number);
+ 'repository_priority'?: (number | string);
+ /**
+ * Only set if requested
+ */
+ 'content'?: (Buffer | Uint8Array | string);
+ /**
+ * Checksum of the content.
+ */
+ 'checksum'?: (Buffer | Uint8Array | string);
+ /**
+ * Detected language of the result.
+ */
+ 'language'?: (string);
+ /**
+ * sub_repository_name is the globally unique name of the repo,
+ * if it came from a subrepository
+ */
+ 'sub_repository_name'?: (string);
+ /**
+ * sub_repository_path holds the prefix where the subrepository
+ * was mounted.
+ */
+ 'sub_repository_path'?: (string);
+ /**
+ * Commit SHA1 (hex) of the (sub)repo holding the file.
+ */
+ 'version'?: (string);
+}
+
+/**
+ * FileMatch contains all the matches within a file.
+ */
+export interface FileMatch__Output {
+ /**
+ * Ranking; the higher, the better.
+ */
+ 'score': (number);
+ /**
+ * For debugging. Needs DebugScore set, but public so tests in
+ * other packages can print some diagnostics.
+ */
+ 'debug': (string);
+ /**
+ * The repository-relative path to the file.
+ * 🚨 Warning: file_name might not be a valid UTF-8 string.
+ */
+ 'file_name': (Buffer);
+ /**
+ * Repository is the globally unique name of the repo of the
+ * match
+ */
+ 'repository': (string);
+ 'branches': (string)[];
+ /**
+ * One of line_matches or chunk_matches will be returned depending on whether
+ * the SearchOptions.ChunkMatches is set.
+ */
+ 'line_matches': (_zoekt_webserver_v1_LineMatch__Output)[];
+ 'chunk_matches': (_zoekt_webserver_v1_ChunkMatch__Output)[];
+ /**
+ * repository_id is a Sourcegraph extension. This is the ID of Repository in
+ * Sourcegraph.
+ */
+ 'repository_id': (number);
+ 'repository_priority': (number);
+ /**
+ * Only set if requested
+ */
+ 'content': (Buffer);
+ /**
+ * Checksum of the content.
+ */
+ 'checksum': (Buffer);
+ /**
+ * Detected language of the result.
+ */
+ 'language': (string);
+ /**
+ * sub_repository_name is the globally unique name of the repo,
+ * if it came from a subrepository
+ */
+ 'sub_repository_name': (string);
+ /**
+ * sub_repository_path holds the prefix where the subrepository
+ * was mounted.
+ */
+ 'sub_repository_path': (string);
+ /**
+ * Commit SHA1 (hex) of the (sub)repo holding the file.
+ */
+ 'version': (string);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/FileNameSet.ts b/packages/web/src/proto/zoekt/webserver/v1/FileNameSet.ts
new file mode 100644
index 000000000..38aea627d
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/FileNameSet.ts
@@ -0,0 +1,16 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+/**
+ * FileNameSet is a list of file names to match.
+ */
+export interface FileNameSet {
+ 'set'?: (string)[];
+}
+
+/**
+ * FileNameSet is a list of file names to match.
+ */
+export interface FileNameSet__Output {
+ 'set': (string)[];
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/FlushReason.ts b/packages/web/src/proto/zoekt/webserver/v1/FlushReason.ts
new file mode 100644
index 000000000..2cdad17d7
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/FlushReason.ts
@@ -0,0 +1,20 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+export const FlushReason = {
+ FLUSH_REASON_UNKNOWN_UNSPECIFIED: 'FLUSH_REASON_UNKNOWN_UNSPECIFIED',
+ FLUSH_REASON_TIMER_EXPIRED: 'FLUSH_REASON_TIMER_EXPIRED',
+ FLUSH_REASON_FINAL_FLUSH: 'FLUSH_REASON_FINAL_FLUSH',
+ FLUSH_REASON_MAX_SIZE: 'FLUSH_REASON_MAX_SIZE',
+} as const;
+
+export type FlushReason =
+ | 'FLUSH_REASON_UNKNOWN_UNSPECIFIED'
+ | 0
+ | 'FLUSH_REASON_TIMER_EXPIRED'
+ | 1
+ | 'FLUSH_REASON_FINAL_FLUSH'
+ | 2
+ | 'FLUSH_REASON_MAX_SIZE'
+ | 3
+
+export type FlushReason__Output = typeof FlushReason[keyof typeof FlushReason]
diff --git a/packages/web/src/proto/zoekt/webserver/v1/IndexMetadata.ts b/packages/web/src/proto/zoekt/webserver/v1/IndexMetadata.ts
new file mode 100644
index 000000000..3de1dbf68
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/IndexMetadata.ts
@@ -0,0 +1,26 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Timestamp as _google_protobuf_Timestamp, Timestamp__Output as _google_protobuf_Timestamp__Output } from '../../../google/protobuf/Timestamp';
+import type { Long } from '@grpc/proto-loader';
+
+export interface IndexMetadata {
+ 'index_format_version'?: (number | string | Long);
+ 'index_feature_version'?: (number | string | Long);
+ 'index_min_reader_version'?: (number | string | Long);
+ 'index_time'?: (_google_protobuf_Timestamp | null);
+ 'plain_ascii'?: (boolean);
+ 'language_map'?: ({[key: string]: number});
+ 'zoekt_version'?: (string);
+ 'id'?: (string);
+}
+
+export interface IndexMetadata__Output {
+ 'index_format_version': (number);
+ 'index_feature_version': (number);
+ 'index_min_reader_version': (number);
+ 'index_time': (_google_protobuf_Timestamp__Output | null);
+ 'plain_ascii': (boolean);
+ 'language_map': ({[key: string]: number});
+ 'zoekt_version': (string);
+ 'id': (string);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Language.ts b/packages/web/src/proto/zoekt/webserver/v1/Language.ts
new file mode 100644
index 000000000..4f2a9e6bd
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Language.ts
@@ -0,0 +1,10 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+export interface Language {
+ 'language'?: (string);
+}
+
+export interface Language__Output {
+ 'language': (string);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/LineFragmentMatch.ts b/packages/web/src/proto/zoekt/webserver/v1/LineFragmentMatch.ts
new file mode 100644
index 000000000..7c21fa20e
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/LineFragmentMatch.ts
@@ -0,0 +1,38 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { SymbolInfo as _zoekt_webserver_v1_SymbolInfo, SymbolInfo__Output as _zoekt_webserver_v1_SymbolInfo__Output } from '../../../zoekt/webserver/v1/SymbolInfo';
+import type { Long } from '@grpc/proto-loader';
+
+export interface LineFragmentMatch {
+ /**
+ * Offset within the line, in bytes.
+ */
+ 'line_offset'?: (number | string | Long);
+ /**
+ * Offset from file start, in bytes.
+ */
+ 'offset'?: (number);
+ /**
+ * Number of bytes that match.
+ */
+ 'match_length'?: (number | string | Long);
+ 'symbol_info'?: (_zoekt_webserver_v1_SymbolInfo | null);
+ '_symbol_info'?: "symbol_info";
+}
+
+export interface LineFragmentMatch__Output {
+ /**
+ * Offset within the line, in bytes.
+ */
+ 'line_offset': (number);
+ /**
+ * Offset from file start, in bytes.
+ */
+ 'offset': (number);
+ /**
+ * Number of bytes that match.
+ */
+ 'match_length': (number);
+ 'symbol_info'?: (_zoekt_webserver_v1_SymbolInfo__Output | null);
+ '_symbol_info'?: "symbol_info";
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/LineMatch.ts b/packages/web/src/proto/zoekt/webserver/v1/LineMatch.ts
new file mode 100644
index 000000000..f58361bdf
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/LineMatch.ts
@@ -0,0 +1,50 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { LineFragmentMatch as _zoekt_webserver_v1_LineFragmentMatch, LineFragmentMatch__Output as _zoekt_webserver_v1_LineFragmentMatch__Output } from '../../../zoekt/webserver/v1/LineFragmentMatch';
+import type { Long } from '@grpc/proto-loader';
+
+export interface LineMatch {
+ 'line'?: (Buffer | Uint8Array | string);
+ 'line_start'?: (number | string | Long);
+ 'line_end'?: (number | string | Long);
+ 'line_number'?: (number | string | Long);
+ /**
+ * before and after are only set when SearchOptions.NumContextLines is > 0
+ */
+ 'before'?: (Buffer | Uint8Array | string);
+ 'after'?: (Buffer | Uint8Array | string);
+ /**
+ * If set, this was a match on the filename.
+ */
+ 'file_name'?: (boolean);
+ /**
+ * The higher the better. Only ranks the quality of the match
+ * within the file, does not take rank of file into account
+ */
+ 'score'?: (number | string);
+ 'debug_score'?: (string);
+ 'line_fragments'?: (_zoekt_webserver_v1_LineFragmentMatch)[];
+}
+
+export interface LineMatch__Output {
+ 'line': (Buffer);
+ 'line_start': (number);
+ 'line_end': (number);
+ 'line_number': (number);
+ /**
+ * before and after are only set when SearchOptions.NumContextLines is > 0
+ */
+ 'before': (Buffer);
+ 'after': (Buffer);
+ /**
+ * If set, this was a match on the filename.
+ */
+ 'file_name': (boolean);
+ /**
+ * The higher the better. Only ranks the quality of the match
+ * within the file, does not take rank of file into account
+ */
+ 'score': (number);
+ 'debug_score': (string);
+ 'line_fragments': (_zoekt_webserver_v1_LineFragmentMatch__Output)[];
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/ListOptions.ts b/packages/web/src/proto/zoekt/webserver/v1/ListOptions.ts
new file mode 100644
index 000000000..1b1784384
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/ListOptions.ts
@@ -0,0 +1,34 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+export const _zoekt_webserver_v1_ListOptions_RepoListField = {
+ REPO_LIST_FIELD_UNKNOWN_UNSPECIFIED: 'REPO_LIST_FIELD_UNKNOWN_UNSPECIFIED',
+ REPO_LIST_FIELD_REPOS: 'REPO_LIST_FIELD_REPOS',
+ REPO_LIST_FIELD_REPOS_MAP: 'REPO_LIST_FIELD_REPOS_MAP',
+} as const;
+
+export type _zoekt_webserver_v1_ListOptions_RepoListField =
+ | 'REPO_LIST_FIELD_UNKNOWN_UNSPECIFIED'
+ | 0
+ | 'REPO_LIST_FIELD_REPOS'
+ | 1
+ | 'REPO_LIST_FIELD_REPOS_MAP'
+ | 3
+
+export type _zoekt_webserver_v1_ListOptions_RepoListField__Output = typeof _zoekt_webserver_v1_ListOptions_RepoListField[keyof typeof _zoekt_webserver_v1_ListOptions_RepoListField]
+
+export interface ListOptions {
+ /**
+ * Field decides which field to populate in RepoList response.
+ */
+ 'field'?: (_zoekt_webserver_v1_ListOptions_RepoListField);
+}
+
+export interface ListOptions__Output {
+ /**
+ * Field decides which field to populate in RepoList response.
+ */
+ 'field': (_zoekt_webserver_v1_ListOptions_RepoListField__Output);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/ListRequest.ts b/packages/web/src/proto/zoekt/webserver/v1/ListRequest.ts
new file mode 100644
index 000000000..90fd117f9
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/ListRequest.ts
@@ -0,0 +1,14 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from '../../../zoekt/webserver/v1/Q';
+import type { ListOptions as _zoekt_webserver_v1_ListOptions, ListOptions__Output as _zoekt_webserver_v1_ListOptions__Output } from '../../../zoekt/webserver/v1/ListOptions';
+
+export interface ListRequest {
+ 'query'?: (_zoekt_webserver_v1_Q | null);
+ 'opts'?: (_zoekt_webserver_v1_ListOptions | null);
+}
+
+export interface ListRequest__Output {
+ 'query': (_zoekt_webserver_v1_Q__Output | null);
+ 'opts': (_zoekt_webserver_v1_ListOptions__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/ListResponse.ts b/packages/web/src/proto/zoekt/webserver/v1/ListResponse.ts
new file mode 100644
index 000000000..909ca2213
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/ListResponse.ts
@@ -0,0 +1,40 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { RepoListEntry as _zoekt_webserver_v1_RepoListEntry, RepoListEntry__Output as _zoekt_webserver_v1_RepoListEntry__Output } from '../../../zoekt/webserver/v1/RepoListEntry';
+import type { MinimalRepoListEntry as _zoekt_webserver_v1_MinimalRepoListEntry, MinimalRepoListEntry__Output as _zoekt_webserver_v1_MinimalRepoListEntry__Output } from '../../../zoekt/webserver/v1/MinimalRepoListEntry';
+import type { RepoStats as _zoekt_webserver_v1_RepoStats, RepoStats__Output as _zoekt_webserver_v1_RepoStats__Output } from '../../../zoekt/webserver/v1/RepoStats';
+import type { Long } from '@grpc/proto-loader';
+
+export interface ListResponse {
+ /**
+ * Returned when ListOptions.Field is RepoListFieldRepos.
+ */
+ 'repos'?: (_zoekt_webserver_v1_RepoListEntry)[];
+ /**
+ * ReposMap is set when ListOptions.Field is RepoListFieldReposMap.
+ */
+ 'repos_map'?: ({[key: number]: _zoekt_webserver_v1_MinimalRepoListEntry});
+ 'crashes'?: (number | string | Long);
+ /**
+ * Stats response to a List request.
+ * This is the aggregate RepoStats of all repos matching the input query.
+ */
+ 'stats'?: (_zoekt_webserver_v1_RepoStats | null);
+}
+
+export interface ListResponse__Output {
+ /**
+ * Returned when ListOptions.Field is RepoListFieldRepos.
+ */
+ 'repos': (_zoekt_webserver_v1_RepoListEntry__Output)[];
+ /**
+ * ReposMap is set when ListOptions.Field is RepoListFieldReposMap.
+ */
+ 'repos_map': ({[key: number]: _zoekt_webserver_v1_MinimalRepoListEntry__Output});
+ 'crashes': (number);
+ /**
+ * Stats response to a List request.
+ * This is the aggregate RepoStats of all repos matching the input query.
+ */
+ 'stats': (_zoekt_webserver_v1_RepoStats__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Location.ts b/packages/web/src/proto/zoekt/webserver/v1/Location.ts
new file mode 100644
index 000000000..8f840a7be
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Location.ts
@@ -0,0 +1,32 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+
+export interface Location {
+ /**
+ * 0-based byte offset from the beginning of the file
+ */
+ 'byte_offset'?: (number);
+ /**
+ * 1-based line number from the beginning of the file
+ */
+ 'line_number'?: (number);
+ /**
+ * 1-based column number (in runes) from the beginning of line
+ */
+ 'column'?: (number);
+}
+
+export interface Location__Output {
+ /**
+ * 0-based byte offset from the beginning of the file
+ */
+ 'byte_offset': (number);
+ /**
+ * 1-based line number from the beginning of the file
+ */
+ 'line_number': (number);
+ /**
+ * 1-based column number (in runes) from the beginning of line
+ */
+ 'column': (number);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/MinimalRepoListEntry.ts b/packages/web/src/proto/zoekt/webserver/v1/MinimalRepoListEntry.ts
new file mode 100644
index 000000000..ba3618886
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/MinimalRepoListEntry.ts
@@ -0,0 +1,16 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { RepositoryBranch as _zoekt_webserver_v1_RepositoryBranch, RepositoryBranch__Output as _zoekt_webserver_v1_RepositoryBranch__Output } from '../../../zoekt/webserver/v1/RepositoryBranch';
+import type { Long } from '@grpc/proto-loader';
+
+export interface MinimalRepoListEntry {
+ 'has_symbols'?: (boolean);
+ 'branches'?: (_zoekt_webserver_v1_RepositoryBranch)[];
+ 'index_time_unix'?: (number | string | Long);
+}
+
+export interface MinimalRepoListEntry__Output {
+ 'has_symbols': (boolean);
+ 'branches': (_zoekt_webserver_v1_RepositoryBranch__Output)[];
+ 'index_time_unix': (number);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Not.ts b/packages/web/src/proto/zoekt/webserver/v1/Not.ts
new file mode 100644
index 000000000..6f7e43b70
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Not.ts
@@ -0,0 +1,17 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from '../../../zoekt/webserver/v1/Q';
+
+/**
+ * Not inverts the meaning of its child.
+ */
+export interface Not {
+ 'child'?: (_zoekt_webserver_v1_Q | null);
+}
+
+/**
+ * Not inverts the meaning of its child.
+ */
+export interface Not__Output {
+ 'child': (_zoekt_webserver_v1_Q__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Or.ts b/packages/web/src/proto/zoekt/webserver/v1/Or.ts
new file mode 100644
index 000000000..0e3e5974b
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Or.ts
@@ -0,0 +1,17 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from '../../../zoekt/webserver/v1/Q';
+
+/**
+ * Or is matched when any of its children is matched.
+ */
+export interface Or {
+ 'children'?: (_zoekt_webserver_v1_Q)[];
+}
+
+/**
+ * Or is matched when any of its children is matched.
+ */
+export interface Or__Output {
+ 'children': (_zoekt_webserver_v1_Q__Output)[];
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Progress.ts b/packages/web/src/proto/zoekt/webserver/v1/Progress.ts
new file mode 100644
index 000000000..1d3f36b24
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Progress.ts
@@ -0,0 +1,42 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+
+/**
+ * Progress contains information about the global progress of the running search query.
+ * This is used by the frontend to reorder results and emit them when stable.
+ * Sourcegraph specific: this is used when querying multiple zoekt-webserver instances.
+ */
+export interface Progress {
+ /**
+ * Priority of the shard that was searched.
+ */
+ 'priority'?: (number | string);
+ /**
+ * max_pending_priority is the maximum priority of pending result that is being searched in parallel.
+ * This is used to reorder results when the result set is known to be stable-- that is, when a result's
+ * Priority is greater than the max(MaxPendingPriority) from the latest results of each backend, it can be returned to the user.
+ *
+ * max_pending_priority decreases monotonically in each SearchResult.
+ */
+ 'max_pending_priority'?: (number | string);
+}
+
+/**
+ * Progress contains information about the global progress of the running search query.
+ * This is used by the frontend to reorder results and emit them when stable.
+ * Sourcegraph specific: this is used when querying multiple zoekt-webserver instances.
+ */
+export interface Progress__Output {
+ /**
+ * Priority of the shard that was searched.
+ */
+ 'priority': (number);
+ /**
+ * max_pending_priority is the maximum priority of pending result that is being searched in parallel.
+ * This is used to reorder results when the result set is known to be stable-- that is, when a result's
+ * Priority is greater than the max(MaxPendingPriority) from the latest results of each backend, it can be returned to the user.
+ *
+ * max_pending_priority decreases monotonically in each SearchResult.
+ */
+ 'max_pending_priority': (number);
+}
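The stability rule described in the max_pending_priority comment can be made concrete with a small helper. This is a minimal sketch of a hypothetical consumer: `PendingResult` and the surrounding bookkeeping are assumptions, only the `Progress__Output` field comes from the generated type above.

```typescript
import type { Progress__Output } from './Progress';

// Hypothetical shape: a result paired with the priority of the shard it came from.
interface PendingResult {
    priority: number;
}

// A result is stable (safe to emit in order) once its priority exceeds the
// highest max_pending_priority reported by the latest response of every backend.
function isStable(result: PendingResult, latestProgressPerBackend: Progress__Output[]): boolean {
    if (latestProgressPerBackend.length === 0) {
        return true; // no backends are still searching
    }
    const maxPending = Math.max(...latestProgressPerBackend.map((p) => p.max_pending_priority));
    return result.priority > maxPending;
}
```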
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Q.ts b/packages/web/src/proto/zoekt/webserver/v1/Q.ts
new file mode 100644
index 000000000..bf66e0272
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Q.ts
@@ -0,0 +1,63 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+import type { RawConfig as _zoekt_webserver_v1_RawConfig, RawConfig__Output as _zoekt_webserver_v1_RawConfig__Output } from '../../../zoekt/webserver/v1/RawConfig';
+import type { Regexp as _zoekt_webserver_v1_Regexp, Regexp__Output as _zoekt_webserver_v1_Regexp__Output } from '../../../zoekt/webserver/v1/Regexp';
+import type { Symbol as _zoekt_webserver_v1_Symbol, Symbol__Output as _zoekt_webserver_v1_Symbol__Output } from '../../../zoekt/webserver/v1/Symbol';
+import type { Language as _zoekt_webserver_v1_Language, Language__Output as _zoekt_webserver_v1_Language__Output } from '../../../zoekt/webserver/v1/Language';
+import type { Repo as _zoekt_webserver_v1_Repo, Repo__Output as _zoekt_webserver_v1_Repo__Output } from '../../../zoekt/webserver/v1/Repo';
+import type { RepoRegexp as _zoekt_webserver_v1_RepoRegexp, RepoRegexp__Output as _zoekt_webserver_v1_RepoRegexp__Output } from '../../../zoekt/webserver/v1/RepoRegexp';
+import type { BranchesRepos as _zoekt_webserver_v1_BranchesRepos, BranchesRepos__Output as _zoekt_webserver_v1_BranchesRepos__Output } from '../../../zoekt/webserver/v1/BranchesRepos';
+import type { RepoIds as _zoekt_webserver_v1_RepoIds, RepoIds__Output as _zoekt_webserver_v1_RepoIds__Output } from '../../../zoekt/webserver/v1/RepoIds';
+import type { RepoSet as _zoekt_webserver_v1_RepoSet, RepoSet__Output as _zoekt_webserver_v1_RepoSet__Output } from '../../../zoekt/webserver/v1/RepoSet';
+import type { FileNameSet as _zoekt_webserver_v1_FileNameSet, FileNameSet__Output as _zoekt_webserver_v1_FileNameSet__Output } from '../../../zoekt/webserver/v1/FileNameSet';
+import type { Type as _zoekt_webserver_v1_Type, Type__Output as _zoekt_webserver_v1_Type__Output } from '../../../zoekt/webserver/v1/Type';
+import type { Substring as _zoekt_webserver_v1_Substring, Substring__Output as _zoekt_webserver_v1_Substring__Output } from '../../../zoekt/webserver/v1/Substring';
+import type { And as _zoekt_webserver_v1_And, And__Output as _zoekt_webserver_v1_And__Output } from '../../../zoekt/webserver/v1/And';
+import type { Or as _zoekt_webserver_v1_Or, Or__Output as _zoekt_webserver_v1_Or__Output } from '../../../zoekt/webserver/v1/Or';
+import type { Not as _zoekt_webserver_v1_Not, Not__Output as _zoekt_webserver_v1_Not__Output } from '../../../zoekt/webserver/v1/Not';
+import type { Branch as _zoekt_webserver_v1_Branch, Branch__Output as _zoekt_webserver_v1_Branch__Output } from '../../../zoekt/webserver/v1/Branch';
+import type { Boost as _zoekt_webserver_v1_Boost, Boost__Output as _zoekt_webserver_v1_Boost__Output } from '../../../zoekt/webserver/v1/Boost';
+
+export interface Q {
+ 'raw_config'?: (_zoekt_webserver_v1_RawConfig | null);
+ 'regexp'?: (_zoekt_webserver_v1_Regexp | null);
+ 'symbol'?: (_zoekt_webserver_v1_Symbol | null);
+ 'language'?: (_zoekt_webserver_v1_Language | null);
+ 'const'?: (boolean);
+ 'repo'?: (_zoekt_webserver_v1_Repo | null);
+ 'repo_regexp'?: (_zoekt_webserver_v1_RepoRegexp | null);
+ 'branches_repos'?: (_zoekt_webserver_v1_BranchesRepos | null);
+ 'repo_ids'?: (_zoekt_webserver_v1_RepoIds | null);
+ 'repo_set'?: (_zoekt_webserver_v1_RepoSet | null);
+ 'file_name_set'?: (_zoekt_webserver_v1_FileNameSet | null);
+ 'type'?: (_zoekt_webserver_v1_Type | null);
+ 'substring'?: (_zoekt_webserver_v1_Substring | null);
+ 'and'?: (_zoekt_webserver_v1_And | null);
+ 'or'?: (_zoekt_webserver_v1_Or | null);
+ 'not'?: (_zoekt_webserver_v1_Not | null);
+ 'branch'?: (_zoekt_webserver_v1_Branch | null);
+ 'boost'?: (_zoekt_webserver_v1_Boost | null);
+ 'query'?: "raw_config"|"regexp"|"symbol"|"language"|"const"|"repo"|"repo_regexp"|"branches_repos"|"repo_ids"|"repo_set"|"file_name_set"|"type"|"substring"|"and"|"or"|"not"|"branch"|"boost";
+}
+
+export interface Q__Output {
+ 'raw_config'?: (_zoekt_webserver_v1_RawConfig__Output | null);
+ 'regexp'?: (_zoekt_webserver_v1_Regexp__Output | null);
+ 'symbol'?: (_zoekt_webserver_v1_Symbol__Output | null);
+ 'language'?: (_zoekt_webserver_v1_Language__Output | null);
+ 'const'?: (boolean);
+ 'repo'?: (_zoekt_webserver_v1_Repo__Output | null);
+ 'repo_regexp'?: (_zoekt_webserver_v1_RepoRegexp__Output | null);
+ 'branches_repos'?: (_zoekt_webserver_v1_BranchesRepos__Output | null);
+ 'repo_ids'?: (_zoekt_webserver_v1_RepoIds__Output | null);
+ 'repo_set'?: (_zoekt_webserver_v1_RepoSet__Output | null);
+ 'file_name_set'?: (_zoekt_webserver_v1_FileNameSet__Output | null);
+ 'type'?: (_zoekt_webserver_v1_Type__Output | null);
+ 'substring'?: (_zoekt_webserver_v1_Substring__Output | null);
+ 'and'?: (_zoekt_webserver_v1_And__Output | null);
+ 'or'?: (_zoekt_webserver_v1_Or__Output | null);
+ 'not'?: (_zoekt_webserver_v1_Not__Output | null);
+ 'branch'?: (_zoekt_webserver_v1_Branch__Output | null);
+ 'boost'?: (_zoekt_webserver_v1_Boost__Output | null);
+ 'query'?: "raw_config"|"regexp"|"symbol"|"language"|"const"|"repo"|"repo_regexp"|"branches_repos"|"repo_ids"|"repo_set"|"file_name_set"|"type"|"substring"|"and"|"or"|"not"|"branch"|"boost";
+}
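Q mirrors the query oneof from query.proto: on the input side every member is optional and exactly one should be set per node, while the `query` discriminator is filled in on deserialized output. As a minimal sketch (assuming `And` carries a repeated `children` field, as in zoekt's query.proto), a query for files containing "bar" in repositories matching "foo" could be composed like this:

```typescript
import type { Q } from './Q';

// repo:foo AND content substring "bar", expressed with the generated input types.
// Each Q node populates exactly one member of the oneof.
const query: Q = {
    and: {
        children: [
            { repo_regexp: { regexp: 'foo' } },
            { substring: { pattern: 'bar', content: true, case_sensitive: false } },
        ],
    },
};
```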
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Range.ts b/packages/web/src/proto/zoekt/webserver/v1/Range.ts
new file mode 100644
index 000000000..a6e69d3d8
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Range.ts
@@ -0,0 +1,25 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Location as _zoekt_webserver_v1_Location, Location__Output as _zoekt_webserver_v1_Location__Output } from '../../../zoekt/webserver/v1/Location';
+
+export interface Range {
+ /**
+ * The inclusive beginning of the range.
+ */
+ 'start'?: (_zoekt_webserver_v1_Location | null);
+ /**
+ * The exclusive end of the range.
+ */
+ 'end'?: (_zoekt_webserver_v1_Location | null);
+}
+
+export interface Range__Output {
+ /**
+ * The inclusive beginning of the range.
+ */
+ 'start': (_zoekt_webserver_v1_Location__Output | null);
+ /**
+ * The exclusive end of the range.
+ */
+ 'end': (_zoekt_webserver_v1_Location__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/RawConfig.ts b/packages/web/src/proto/zoekt/webserver/v1/RawConfig.ts
new file mode 100644
index 000000000..6379dbbe7
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/RawConfig.ts
@@ -0,0 +1,46 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+export const _zoekt_webserver_v1_RawConfig_Flag = {
+ FLAG_UNKNOWN_UNSPECIFIED: 'FLAG_UNKNOWN_UNSPECIFIED',
+ FLAG_ONLY_PUBLIC: 'FLAG_ONLY_PUBLIC',
+ FLAG_ONLY_PRIVATE: 'FLAG_ONLY_PRIVATE',
+ FLAG_ONLY_FORKS: 'FLAG_ONLY_FORKS',
+ FLAG_NO_FORKS: 'FLAG_NO_FORKS',
+ FLAG_ONLY_ARCHIVED: 'FLAG_ONLY_ARCHIVED',
+ FLAG_NO_ARCHIVED: 'FLAG_NO_ARCHIVED',
+} as const;
+
+export type _zoekt_webserver_v1_RawConfig_Flag =
+ | 'FLAG_UNKNOWN_UNSPECIFIED'
+ | 0
+ | 'FLAG_ONLY_PUBLIC'
+ | 1
+ | 'FLAG_ONLY_PRIVATE'
+ | 2
+ | 'FLAG_ONLY_FORKS'
+ | 4
+ | 'FLAG_NO_FORKS'
+ | 8
+ | 'FLAG_ONLY_ARCHIVED'
+ | 16
+ | 'FLAG_NO_ARCHIVED'
+ | 32
+
+export type _zoekt_webserver_v1_RawConfig_Flag__Output = typeof _zoekt_webserver_v1_RawConfig_Flag[keyof typeof _zoekt_webserver_v1_RawConfig_Flag]
+
+/**
+ * RawConfig filters repositories based on their encoded RawConfig map.
+ */
+export interface RawConfig {
+ 'flags'?: (_zoekt_webserver_v1_RawConfig_Flag)[];
+}
+
+/**
+ * RawConfig filters repositories based on their encoded RawConfig map.
+ */
+export interface RawConfig__Output {
+ 'flags': (_zoekt_webserver_v1_RawConfig_Flag__Output)[];
+}
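Following the @grpc/proto-loader convention, Flag is exported both as a const object of enum names and as a union type that also admits the numeric wire values, so input messages can use either form. A minimal sketch:

```typescript
import type { RawConfig } from './RawConfig';
import { _zoekt_webserver_v1_RawConfig_Flag as Flag } from './RawConfig';

// Restrict matching to public, non-archived repositories.
// Enum names and their numeric values are interchangeable on the input side.
const rawConfig: RawConfig = {
    flags: [Flag.FLAG_ONLY_PUBLIC, 'FLAG_NO_ARCHIVED'],
};
```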
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Regexp.ts b/packages/web/src/proto/zoekt/webserver/v1/Regexp.ts
new file mode 100644
index 000000000..67582b031
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Regexp.ts
@@ -0,0 +1,22 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+/**
+ * Regexp is a query looking for regular expression matches.
+ */
+export interface Regexp {
+ 'regexp'?: (string);
+ 'file_name'?: (boolean);
+ 'content'?: (boolean);
+ 'case_sensitive'?: (boolean);
+}
+
+/**
+ * Regexp is a query looking for regular expression matches.
+ */
+export interface Regexp__Output {
+ 'regexp': (string);
+ 'file_name': (boolean);
+ 'content': (boolean);
+ 'case_sensitive': (boolean);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Repo.ts b/packages/web/src/proto/zoekt/webserver/v1/Repo.ts
new file mode 100644
index 000000000..a10958831
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Repo.ts
@@ -0,0 +1,10 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+export interface Repo {
+ 'regexp'?: (string);
+}
+
+export interface Repo__Output {
+ 'regexp': (string);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/RepoIds.ts b/packages/web/src/proto/zoekt/webserver/v1/RepoIds.ts
new file mode 100644
index 000000000..7ba23d27d
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/RepoIds.ts
@@ -0,0 +1,24 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+/**
+ * Similar to BranchRepos but will be used to match only by repoid and
+ * therefore matches all branches
+ */
+export interface RepoIds {
+ /**
+ * a serialized roaring bitmap of the target repo ids
+ */
+ 'repos'?: (Buffer | Uint8Array | string);
+}
+
+/**
+ * Similar to BranchRepos but will be used to match only by repoid and
+ * therefore matches all branches
+ */
+export interface RepoIds__Output {
+ /**
+ * a serialized roaring bitmap of the target repo ids
+ */
+ 'repos': (Buffer);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/RepoListEntry.ts b/packages/web/src/proto/zoekt/webserver/v1/RepoListEntry.ts
new file mode 100644
index 000000000..a216311df
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/RepoListEntry.ts
@@ -0,0 +1,17 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Repository as _zoekt_webserver_v1_Repository, Repository__Output as _zoekt_webserver_v1_Repository__Output } from '../../../zoekt/webserver/v1/Repository';
+import type { IndexMetadata as _zoekt_webserver_v1_IndexMetadata, IndexMetadata__Output as _zoekt_webserver_v1_IndexMetadata__Output } from '../../../zoekt/webserver/v1/IndexMetadata';
+import type { RepoStats as _zoekt_webserver_v1_RepoStats, RepoStats__Output as _zoekt_webserver_v1_RepoStats__Output } from '../../../zoekt/webserver/v1/RepoStats';
+
+export interface RepoListEntry {
+ 'repository'?: (_zoekt_webserver_v1_Repository | null);
+ 'index_metadata'?: (_zoekt_webserver_v1_IndexMetadata | null);
+ 'stats'?: (_zoekt_webserver_v1_RepoStats | null);
+}
+
+export interface RepoListEntry__Output {
+ 'repository': (_zoekt_webserver_v1_Repository__Output | null);
+ 'index_metadata': (_zoekt_webserver_v1_IndexMetadata__Output | null);
+ 'stats': (_zoekt_webserver_v1_RepoStats__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/RepoRegexp.ts b/packages/web/src/proto/zoekt/webserver/v1/RepoRegexp.ts
new file mode 100644
index 000000000..1ee2da099
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/RepoRegexp.ts
@@ -0,0 +1,10 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+export interface RepoRegexp {
+ 'regexp'?: (string);
+}
+
+export interface RepoRegexp__Output {
+ 'regexp': (string);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/RepoSet.ts b/packages/web/src/proto/zoekt/webserver/v1/RepoSet.ts
new file mode 100644
index 000000000..3638f1dca
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/RepoSet.ts
@@ -0,0 +1,16 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+/**
+ * RepoSet is a list of repos to match.
+ */
+export interface RepoSet {
+ 'set'?: ({[key: string]: boolean});
+}
+
+/**
+ * RepoSet is a list of repos to match.
+ */
+export interface RepoSet__Output {
+ 'set': ({[key: string]: boolean});
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/RepoStats.ts b/packages/web/src/proto/zoekt/webserver/v1/RepoStats.ts
new file mode 100644
index 000000000..db507c230
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/RepoStats.ts
@@ -0,0 +1,97 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Long } from '@grpc/proto-loader';
+
+/**
+ * RepoStats is a collection of statistics for a set of repositories.
+ */
+export interface RepoStats {
+ /**
+ * repos is used for aggregating the number of repositories.
+ */
+ 'repos'?: (number | string | Long);
+ /**
+ * shards is the total number of search shards.
+ */
+ 'shards'?: (number | string | Long);
+ /**
+ * documents holds the number of documents or files.
+ */
+ 'documents'?: (number | string | Long);
+ /**
+ * index_bytes is the amount of RAM used for index overhead.
+ */
+ 'index_bytes'?: (number | string | Long);
+ /**
+ * content_bytes is the amount of RAM used for raw content.
+ */
+ 'content_bytes'?: (number | string | Long);
+ /**
+ * new_lines_count is the number of newlines "\n" that appear in the zoekt
+ * indexed documents. This is not exactly the same as line count, since it
+ * will not include lines not terminated by "\n" (eg a file with no "\n", or
+ * a final line without "\n"). Note: Zoekt deduplicates documents across
+ * branches, so if a path has the same contents on multiple branches, there
+ * is only one document for it. As such that document's newlines is only
+ * counted once. See DefaultBranchNewLinesCount and AllBranchesNewLinesCount
+ * for counts which do not deduplicate.
+ */
+ 'new_lines_count'?: (number | string | Long);
+ /**
+ * default_branch_new_lines_count is the number of newlines "\n" in the default
+ * branch.
+ */
+ 'default_branch_new_lines_count'?: (number | string | Long);
+ /**
+ * other_branches_new_lines_count is the number of newlines "\n" in all branches
+ * except the default branch.
+ */
+ 'other_branches_new_lines_count'?: (number | string | Long);
+}
+
+/**
+ * RepoStats is a collection of statistics for a set of repositories.
+ */
+export interface RepoStats__Output {
+ /**
+ * repos is used for aggregating the number of repositories.
+ */
+ 'repos': (number);
+ /**
+ * shards is the total number of search shards.
+ */
+ 'shards': (number);
+ /**
+ * documents holds the number of documents or files.
+ */
+ 'documents': (number);
+ /**
+ * index_bytes is the amount of RAM used for index overhead.
+ */
+ 'index_bytes': (number);
+ /**
+ * content_bytes is the amount of RAM used for raw content.
+ */
+ 'content_bytes': (number);
+ /**
+ * new_lines_count is the number of newlines "\n" that appear in the zoekt
+ * indexed documents. This is not exactly the same as line count, since it
+ * will not include lines not terminated by "\n" (eg a file with no "\n", or
+ * a final line without "\n"). Note: Zoekt deduplicates documents across
+ * branches, so if a path has the same contents on multiple branches, there
+ * is only one document for it. As such that document's newlines is only
+ * counted once. See DefaultBranchNewLinesCount and AllBranchesNewLinesCount
+ * for counts which do not deduplicate.
+ */
+ 'new_lines_count': (number);
+ /**
+ * default_branch_new_lines_count is the number of newlines "\n" in the default
+ * branch.
+ */
+ 'default_branch_new_lines_count': (number);
+ /**
+ * other_branches_new_lines_count is the number of newlines "\n" in all branches
+ * except the default branch.
+ */
+ 'other_branches_new_lines_count': (number);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Repository.ts b/packages/web/src/proto/zoekt/webserver/v1/Repository.ts
new file mode 100644
index 000000000..347f26f34
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Repository.ts
@@ -0,0 +1,184 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { RepositoryBranch as _zoekt_webserver_v1_RepositoryBranch, RepositoryBranch__Output as _zoekt_webserver_v1_RepositoryBranch__Output } from '../../../zoekt/webserver/v1/RepositoryBranch';
+import type { Repository as _zoekt_webserver_v1_Repository, Repository__Output as _zoekt_webserver_v1_Repository__Output } from '../../../zoekt/webserver/v1/Repository';
+import type { Timestamp as _google_protobuf_Timestamp, Timestamp__Output as _google_protobuf_Timestamp__Output } from '../../../google/protobuf/Timestamp';
+import type { Long } from '@grpc/proto-loader';
+
+export interface Repository {
+ /**
+ * Sourcegraph's repository ID
+ */
+ 'id'?: (number);
+ /**
+ * The repository name
+ */
+ 'name'?: (string);
+ /**
+ * The repository URL.
+ */
+ 'url'?: (string);
+ /**
+ * The physical source where this repo came from, eg. full
+ * path to the zip filename or git repository directory. This
+ * will not be exposed in the UI, but can be used to detect
+ * orphaned index shards.
+ */
+ 'source'?: (string);
+ /**
+ * The branches indexed in this repo.
+ */
+ 'branches'?: (_zoekt_webserver_v1_RepositoryBranch)[];
+ /**
+ * Nil if this is not the super project.
+ */
+ 'sub_repo_map'?: ({[key: string]: _zoekt_webserver_v1_Repository});
+ /**
+ * URL template to link to the commit of a branch
+ */
+ 'commit_url_template'?: (string);
+ /**
+ * The repository URL for getting to a file. Has access to
+ * {{.Version}}, {{.Path}}
+ */
+ 'file_url_template'?: (string);
+ /**
+ * The URL fragment to add to a file URL for line numbers. Has
+ * access to {{.LineNumber}}. The fragment should include the
+ * separator, generally '#' or ';'.
+ */
+ 'line_fragment_template'?: (string);
+ /**
+ * Perf optimization: priority is set when we load the shard. It corresponds to
+ * the value of "priority" stored in RawConfig.
+ */
+ 'priority'?: (number | string);
+ /**
+ * All zoekt.* configuration settings.
+ */
+ 'raw_config'?: ({[key: string]: string});
+ /**
+ * Importance of the repository, bigger is more important
+ */
+ 'rank'?: (number);
+ /**
+ * index_options is a hash of the options used to create the index for the
+ * repo.
+ */
+ 'index_options'?: (string);
+ /**
+ * has_symbols is true if this repository has indexed ctags
+ * output. Sourcegraph specific: This field is more appropriate for
+ * IndexMetadata. However, we store it here since the Sourcegraph frontend
+ * can read this structure but not IndexMetadata.
+ */
+ 'has_symbols'?: (boolean);
+ /**
+ * tombstone is true if we are not allowed to search this repo.
+ */
+ 'tombstone'?: (boolean);
+ /**
+ * latest_commit_date is the date of the latest commit among all indexed Branches.
+ * The date might be time.Time's 0-value if the repository was last indexed
+ * before this field was added.
+ */
+ 'latest_commit_date'?: (_google_protobuf_Timestamp | null);
+ /**
+ * file_tombstones is a set of file paths that should be ignored across all branches
+ * in this shard.
+ */
+ 'file_tombstones'?: (string)[];
+ /**
+ * tenant_id is the tenant ID of the repository.
+ */
+ 'tenant_id'?: (number | string | Long);
+}
+
+export interface Repository__Output {
+ /**
+ * Sourcegraph's repository ID
+ */
+ 'id': (number);
+ /**
+ * The repository name
+ */
+ 'name': (string);
+ /**
+ * The repository URL.
+ */
+ 'url': (string);
+ /**
+ * The physical source where this repo came from, eg. full
+ * path to the zip filename or git repository directory. This
+ * will not be exposed in the UI, but can be used to detect
+ * orphaned index shards.
+ */
+ 'source': (string);
+ /**
+ * The branches indexed in this repo.
+ */
+ 'branches': (_zoekt_webserver_v1_RepositoryBranch__Output)[];
+ /**
+ * Nil if this is not the super project.
+ */
+ 'sub_repo_map': ({[key: string]: _zoekt_webserver_v1_Repository__Output});
+ /**
+ * URL template to link to the commit of a branch
+ */
+ 'commit_url_template': (string);
+ /**
+ * The repository URL for getting to a file. Has access to
+ * {{.Version}}, {{.Path}}
+ */
+ 'file_url_template': (string);
+ /**
+ * The URL fragment to add to a file URL for line numbers. Has
+ * access to {{.LineNumber}}. The fragment should include the
+ * separator, generally '#' or ';'.
+ */
+ 'line_fragment_template': (string);
+ /**
+ * Perf optimization: priority is set when we load the shard. It corresponds to
+ * the value of "priority" stored in RawConfig.
+ */
+ 'priority': (number);
+ /**
+ * All zoekt.* configuration settings.
+ */
+ 'raw_config': ({[key: string]: string});
+ /**
+ * Importance of the repository, bigger is more important
+ */
+ 'rank': (number);
+ /**
+ * index_options is a hash of the options used to create the index for the
+ * repo.
+ */
+ 'index_options': (string);
+ /**
+ * has_symbols is true if this repository has indexed ctags
+ * output. Sourcegraph specific: This field is more appropriate for
+ * IndexMetadata. However, we store it here since the Sourcegraph frontend
+ * can read this structure but not IndexMetadata.
+ */
+ 'has_symbols': (boolean);
+ /**
+ * tombstone is true if we are not allowed to search this repo.
+ */
+ 'tombstone': (boolean);
+ /**
+ * latest_commit_date is the date of the latest commit among all indexed Branches.
+ * The date might be time.Time's 0-value if the repository was last indexed
+ * before this field was added.
+ */
+ 'latest_commit_date': (_google_protobuf_Timestamp__Output | null);
+ /**
+ * file_tombstones is a set of file paths that should be ignored across all branches
+ * in this shard.
+ */
+ 'file_tombstones': (string)[];
+ /**
+ * tenant_id is the tenant ID of the repository.
+ */
+ 'tenant_id': (number);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/RepositoryBranch.ts b/packages/web/src/proto/zoekt/webserver/v1/RepositoryBranch.ts
new file mode 100644
index 000000000..d18ee9554
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/RepositoryBranch.ts
@@ -0,0 +1,20 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+
+/**
+ * RepositoryBranch describes an indexed branch, which is a name
+ * combined with a version.
+ */
+export interface RepositoryBranch {
+ 'name'?: (string);
+ 'version'?: (string);
+}
+
+/**
+ * RepositoryBranch describes an indexed branch, which is a name
+ * combined with a version.
+ */
+export interface RepositoryBranch__Output {
+ 'name': (string);
+ 'version': (string);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/SearchOptions.ts b/packages/web/src/proto/zoekt/webserver/v1/SearchOptions.ts
new file mode 100644
index 000000000..a3fea5a21
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/SearchOptions.ts
@@ -0,0 +1,156 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Duration as _google_protobuf_Duration, Duration__Output as _google_protobuf_Duration__Output } from '../../../google/protobuf/Duration';
+import type { Long } from '@grpc/proto-loader';
+
+export interface SearchOptions {
+ /**
+ * Return an upper-bound estimate of eligible documents in
+ * stats.ShardFilesConsidered.
+ */
+ 'estimate_doc_count'?: (boolean);
+ /**
+ * Return the whole file.
+ */
+ 'whole'?: (boolean);
+ /**
+ * Maximum number of matches: skip all processing of an index
+ * shard after we have found this many non-overlapping matches.
+ */
+ 'shard_max_match_count'?: (number | string | Long);
+ /**
+ * Maximum number of matches: stop looking for more matches
+ * once we have this many matches across shards.
+ */
+ 'total_max_match_count'?: (number | string | Long);
+ /**
+ * Maximum number of matches: skip processing documents for a repository in
+ * a shard once we have found ShardRepoMaxMatchCount.
+ *
+ * A compound shard may contain multiple repositories. This will most often
+ * be set to 1 to find all repositories containing a result.
+ */
+ 'shard_repo_max_match_count'?: (number | string | Long);
+ /**
+ * Abort the search after this much time has passed.
+ */
+ 'max_wall_time'?: (_google_protobuf_Duration | null);
+ /**
+ * FlushWallTime if non-zero will stop streaming behaviour at first and
+ * instead will collate and sort results. At FlushWallTime the results will
+ * be sent and then the behaviour will revert to the normal streaming.
+ */
+ 'flush_wall_time'?: (_google_protobuf_Duration | null);
+ /**
+ * Truncates the number of documents (i.e. files) after collating and
+ * sorting the results.
+ */
+ 'max_doc_display_count'?: (number | string | Long);
+ /**
+ * If set to a number greater than zero, then up to this many context
+ * lines will be added before and after each matched line.
+ * Note that the included context lines might contain matches and
+ * it's up to the consumer of the result to remove those lines.
+ */
+ 'num_context_lines'?: (number | string | Long);
+ /**
+ * If true, ChunkMatches will be returned in each FileMatch rather than LineMatches
+ * EXPERIMENTAL: the behavior of this flag may be changed in future versions.
+ */
+ 'chunk_matches'?: (boolean);
+ /**
+ * Trace turns on opentracing for this request if true and if the Jaeger address was provided as
+ * a command-line flag
+ */
+ 'trace'?: (boolean);
+ /**
+ * If set, the search results will contain debug information for scoring.
+ */
+ 'debug_score'?: (boolean);
+ /**
+ * EXPERIMENTAL. If true, use text search scoring instead of the default scoring formula.
+ * Currently, this treats each match in a file as a term and computes an approximation to BM25.
+ * When enabled, all other scoring signals are ignored, including document ranks.
+ */
+ 'use_bm25_scoring'?: (boolean);
+ /**
+ * Truncates the number of matches after collating and sorting the results.
+ */
+ 'max_match_display_count'?: (number | string | Long);
+}
+
+export interface SearchOptions__Output {
+ /**
+ * Return an upper-bound estimate of eligible documents in
+ * stats.ShardFilesConsidered.
+ */
+ 'estimate_doc_count': (boolean);
+ /**
+ * Return the whole file.
+ */
+ 'whole': (boolean);
+ /**
+ * Maximum number of matches: skip all processing of an index
+ * shard after we have found this many non-overlapping matches.
+ */
+ 'shard_max_match_count': (number);
+ /**
+ * Maximum number of matches: stop looking for more matches
+ * once we have this many matches across shards.
+ */
+ 'total_max_match_count': (number);
+ /**
+ * Maximum number of matches: skip processing documents for a repository in
+ * a shard once we have found ShardRepoMaxMatchCount.
+ *
+ * A compound shard may contain multiple repositories. This will most often
+ * be set to 1 to find all repositories containing a result.
+ */
+ 'shard_repo_max_match_count': (number);
+ /**
+ * Abort the search after this much time has passed.
+ */
+ 'max_wall_time': (_google_protobuf_Duration__Output | null);
+ /**
+ * FlushWallTime if non-zero will stop streaming behaviour at first and
+ * instead will collate and sort results. At FlushWallTime the results will
+ * be sent and then the behaviour will revert to the normal streaming.
+ */
+ 'flush_wall_time': (_google_protobuf_Duration__Output | null);
+ /**
+ * Truncates the number of documents (i.e. files) after collating and
+ * sorting the results.
+ */
+ 'max_doc_display_count': (number);
+ /**
+ * If set to a number greater than zero, then up to this many context
+ * lines will be added before and after each matched line.
+ * Note that the included context lines might contain matches and
+ * it's up to the consumer of the result to remove those lines.
+ */
+ 'num_context_lines': (number);
+ /**
+ * If true, ChunkMatches will be returned in each FileMatch rather than LineMatches
+ * EXPERIMENTAL: the behavior of this flag may be changed in future versions.
+ */
+ 'chunk_matches': (boolean);
+ /**
+ * Trace turns on opentracing for this request if true and if the Jaeger address was provided as
+ * a command-line flag
+ */
+ 'trace': (boolean);
+ /**
+ * If set, the search results will contain debug information for scoring.
+ */
+ 'debug_score': (boolean);
+ /**
+ * EXPERIMENTAL. If true, use text search scoring instead of the default scoring formula.
+ * Currently, this treats each match in a file as a term and computes an approximation to BM25.
+ * When enabled, all other scoring signals are ignored, including document ranks.
+ */
+ 'use_bm25_scoring': (boolean);
+ /**
+ * Truncates the number of matches after collating and sorting the results.
+ */
+ 'max_match_display_count': (number);
+}
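A minimal sketch of filling SearchOptions for an interactive search; the particular limits are illustrative only. int64 fields accept a plain number, a string, or a Long on input, and max_wall_time is assumed to take the generated Duration input shape ({ seconds, nanos }).

```typescript
import type { SearchOptions } from './SearchOptions';

// Illustrative options: chunk matches with a little context, bounded work and output.
const opts: SearchOptions = {
    chunk_matches: true,
    num_context_lines: 2,
    shard_max_match_count: 10_000,
    total_max_match_count: 100_000,
    max_match_display_count: 1_000,
    max_wall_time: { seconds: 10 }, // assumed Duration input shape
};
```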
diff --git a/packages/web/src/proto/zoekt/webserver/v1/SearchRequest.ts b/packages/web/src/proto/zoekt/webserver/v1/SearchRequest.ts
new file mode 100644
index 000000000..39a9fe3cc
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/SearchRequest.ts
@@ -0,0 +1,14 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from '../../../zoekt/webserver/v1/Q';
+import type { SearchOptions as _zoekt_webserver_v1_SearchOptions, SearchOptions__Output as _zoekt_webserver_v1_SearchOptions__Output } from '../../../zoekt/webserver/v1/SearchOptions';
+
+export interface SearchRequest {
+ 'query'?: (_zoekt_webserver_v1_Q | null);
+ 'opts'?: (_zoekt_webserver_v1_SearchOptions | null);
+}
+
+export interface SearchRequest__Output {
+ 'query': (_zoekt_webserver_v1_Q__Output | null);
+ 'opts': (_zoekt_webserver_v1_SearchOptions__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/SearchResponse.ts b/packages/web/src/proto/zoekt/webserver/v1/SearchResponse.ts
new file mode 100644
index 000000000..51a64177c
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/SearchResponse.ts
@@ -0,0 +1,17 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Stats as _zoekt_webserver_v1_Stats, Stats__Output as _zoekt_webserver_v1_Stats__Output } from '../../../zoekt/webserver/v1/Stats';
+import type { Progress as _zoekt_webserver_v1_Progress, Progress__Output as _zoekt_webserver_v1_Progress__Output } from '../../../zoekt/webserver/v1/Progress';
+import type { FileMatch as _zoekt_webserver_v1_FileMatch, FileMatch__Output as _zoekt_webserver_v1_FileMatch__Output } from '../../../zoekt/webserver/v1/FileMatch';
+
+export interface SearchResponse {
+ 'stats'?: (_zoekt_webserver_v1_Stats | null);
+ 'progress'?: (_zoekt_webserver_v1_Progress | null);
+ 'files'?: (_zoekt_webserver_v1_FileMatch)[];
+}
+
+export interface SearchResponse__Output {
+ 'stats': (_zoekt_webserver_v1_Stats__Output | null);
+ 'progress': (_zoekt_webserver_v1_Progress__Output | null);
+ 'files': (_zoekt_webserver_v1_FileMatch__Output)[];
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Stats.ts b/packages/web/src/proto/zoekt/webserver/v1/Stats.ts
new file mode 100644
index 000000000..348eea642
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Stats.ts
@@ -0,0 +1,181 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { Duration as _google_protobuf_Duration, Duration__Output as _google_protobuf_Duration__Output } from '../../../google/protobuf/Duration';
+import type { FlushReason as _zoekt_webserver_v1_FlushReason, FlushReason__Output as _zoekt_webserver_v1_FlushReason__Output } from '../../../zoekt/webserver/v1/FlushReason';
+import type { Long } from '@grpc/proto-loader';
+
+export interface Stats {
+ /**
+ * Amount of I/O for reading contents.
+ */
+ 'content_bytes_loaded'?: (number | string | Long);
+ /**
+ * Amount of I/O for reading from index.
+ */
+ 'index_bytes_loaded'?: (number | string | Long);
+ /**
+ * Number of search shards that had a crash.
+ */
+ 'crashes'?: (number | string | Long);
+ /**
+ * Wall clock time for this search
+ */
+ 'duration'?: (_google_protobuf_Duration | null);
+ /**
+ * Number of files containing a match.
+ */
+ 'file_count'?: (number | string | Long);
+ /**
+ * Number of files in shards that we considered.
+ */
+ 'shard_files_considered'?: (number | string | Long);
+ /**
+ * Files that we evaluated. Equivalent to files for which all
+ * atom matches (including negations) evaluated to true.
+ */
+ 'files_considered'?: (number | string | Long);
+ /**
+ * Files for which we loaded file content to verify substring matches
+ */
+ 'files_loaded'?: (number | string | Long);
+ /**
+ * Candidate files whose contents weren't examined because we
+ * gathered enough matches.
+ */
+ 'files_skipped'?: (number | string | Long);
+ /**
+ * Shards that we scanned to find matches.
+ */
+ 'shards_scanned'?: (number | string | Long);
+ /**
+ * Shards that we did not process because a query was canceled.
+ */
+ 'shards_skipped'?: (number | string | Long);
+ /**
+ * Shards that we did not process because the query was rejected by the
+ * ngram filter indicating it had no matches.
+ */
+ 'shards_skipped_filter'?: (number | string | Long);
+ /**
+ * Number of non-overlapping matches
+ */
+ 'match_count'?: (number | string | Long);
+ /**
+ * Number of candidate matches as a result of searching ngrams.
+ */
+ 'ngram_matches'?: (number | string | Long);
+ /**
+ * Wall clock time for queued search.
+ */
+ 'wait'?: (_google_protobuf_Duration | null);
+ /**
+ * Number of times regexp was called on files that we evaluated.
+ */
+ 'regexps_considered'?: (number | string | Long);
+ /**
+ * FlushReason explains why results were flushed.
+ */
+ 'flush_reason'?: (_zoekt_webserver_v1_FlushReason);
+ /**
+ * NgramLookups is the number of times we accessed an ngram in the index.
+ */
+ 'ngram_lookups'?: (number | string | Long);
+ /**
+ * Aggregate wall clock time spent constructing and pruning the match tree.
+ * This accounts for time such as lookups in the trigram index.
+ */
+ 'match_tree_construction'?: (_google_protobuf_Duration | null);
+ /**
+ * Aggregate wall clock time spent searching the match tree. This accounts
+ * for the bulk of search work done looking for matches.
+ */
+ 'match_tree_search'?: (_google_protobuf_Duration | null);
+}
+
+export interface Stats__Output {
+ /**
+ * Amount of I/O for reading contents.
+ */
+ 'content_bytes_loaded': (number);
+ /**
+ * Amount of I/O for reading from index.
+ */
+ 'index_bytes_loaded': (number);
+ /**
+ * Number of search shards that had a crash.
+ */
+ 'crashes': (number);
+ /**
+ * Wall clock time for this search
+ */
+ 'duration': (_google_protobuf_Duration__Output | null);
+ /**
+ * Number of files containing a match.
+ */
+ 'file_count': (number);
+ /**
+ * Number of files in shards that we considered.
+ */
+ 'shard_files_considered': (number);
+ /**
+ * Files that we evaluated. Equivalent to files for which all
+ * atom matches (including negations) evaluated to true.
+ */
+ 'files_considered': (number);
+ /**
+ * Files for which we loaded file content to verify substring matches
+ */
+ 'files_loaded': (number);
+ /**
+ * Candidate files whose contents weren't examined because we
+ * gathered enough matches.
+ */
+ 'files_skipped': (number);
+ /**
+ * Shards that we scanned to find matches.
+ */
+ 'shards_scanned': (number);
+ /**
+ * Shards that we did not process because a query was canceled.
+ */
+ 'shards_skipped': (number);
+ /**
+ * Shards that we did not process because the query was rejected by the
+ * ngram filter indicating it had no matches.
+ */
+ 'shards_skipped_filter': (number);
+ /**
+ * Number of non-overlapping matches
+ */
+ 'match_count': (number);
+ /**
+ * Number of candidate matches as a result of searching ngrams.
+ */
+ 'ngram_matches': (number);
+ /**
+ * Wall clock time for queued search.
+ */
+ 'wait': (_google_protobuf_Duration__Output | null);
+ /**
+ * Number of times regexp was called on files that we evaluated.
+ */
+ 'regexps_considered': (number);
+ /**
+ * FlushReason explains why results were flushed.
+ */
+ 'flush_reason': (_zoekt_webserver_v1_FlushReason__Output);
+ /**
+ * NgramLookups is the number of times we accessed an ngram in the index.
+ */
+ 'ngram_lookups': (number);
+ /**
+ * Aggregate wall clock time spent constructing and pruning the match tree.
+ * This accounts for time such as lookups in the trigram index.
+ */
+ 'match_tree_construction': (_google_protobuf_Duration__Output | null);
+ /**
+ * Aggregate wall clock time spent searching the match tree. This accounts
+ * for the bulk of search work done looking for matches.
+ */
+ 'match_tree_search': (_google_protobuf_Duration__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/StreamSearchRequest.ts b/packages/web/src/proto/zoekt/webserver/v1/StreamSearchRequest.ts
new file mode 100644
index 000000000..219e34773
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/StreamSearchRequest.ts
@@ -0,0 +1,11 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { SearchRequest as _zoekt_webserver_v1_SearchRequest, SearchRequest__Output as _zoekt_webserver_v1_SearchRequest__Output } from '../../../zoekt/webserver/v1/SearchRequest';
+
+export interface StreamSearchRequest {
+ 'request'?: (_zoekt_webserver_v1_SearchRequest | null);
+}
+
+export interface StreamSearchRequest__Output {
+ 'request': (_zoekt_webserver_v1_SearchRequest__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/StreamSearchResponse.ts b/packages/web/src/proto/zoekt/webserver/v1/StreamSearchResponse.ts
new file mode 100644
index 000000000..7b9276ee3
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/StreamSearchResponse.ts
@@ -0,0 +1,11 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type { SearchResponse as _zoekt_webserver_v1_SearchResponse, SearchResponse__Output as _zoekt_webserver_v1_SearchResponse__Output } from '../../../zoekt/webserver/v1/SearchResponse';
+
+export interface StreamSearchResponse {
+ 'response_chunk'?: (_zoekt_webserver_v1_SearchResponse | null);
+}
+
+export interface StreamSearchResponse__Output {
+ 'response_chunk': (_zoekt_webserver_v1_SearchResponse__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Substring.ts b/packages/web/src/proto/zoekt/webserver/v1/Substring.ts
new file mode 100644
index 000000000..07520aa32
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Substring.ts
@@ -0,0 +1,28 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+
+export interface Substring {
+ 'pattern'?: (string);
+ 'case_sensitive'?: (boolean);
+ /**
+ * Match only filename
+ */
+ 'file_name'?: (boolean);
+ /**
+ * Match only content
+ */
+ 'content'?: (boolean);
+}
+
+export interface Substring__Output {
+ 'pattern': (string);
+ 'case_sensitive': (boolean);
+ /**
+ * Match only filename
+ */
+ 'file_name': (boolean);
+ /**
+ * Match only content
+ */
+ 'content': (boolean);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Symbol.ts b/packages/web/src/proto/zoekt/webserver/v1/Symbol.ts
new file mode 100644
index 000000000..7ccf5c71d
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Symbol.ts
@@ -0,0 +1,11 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from '../../../zoekt/webserver/v1/Q';
+
+export interface Symbol {
+ 'expr'?: (_zoekt_webserver_v1_Q | null);
+}
+
+export interface Symbol__Output {
+ 'expr': (_zoekt_webserver_v1_Q__Output | null);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/SymbolInfo.ts b/packages/web/src/proto/zoekt/webserver/v1/SymbolInfo.ts
new file mode 100644
index 000000000..5622d50fe
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/SymbolInfo.ts
@@ -0,0 +1,16 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+
+export interface SymbolInfo {
+ 'sym'?: (string);
+ 'kind'?: (string);
+ 'parent'?: (string);
+ 'parent_kind'?: (string);
+}
+
+export interface SymbolInfo__Output {
+ 'sym': (string);
+ 'kind': (string);
+ 'parent': (string);
+ 'parent_kind': (string);
+}
diff --git a/packages/web/src/proto/zoekt/webserver/v1/Type.ts b/packages/web/src/proto/zoekt/webserver/v1/Type.ts
new file mode 100644
index 000000000..6f0163d33
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/Type.ts
@@ -0,0 +1,46 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+import type { Q as _zoekt_webserver_v1_Q, Q__Output as _zoekt_webserver_v1_Q__Output } from '../../../zoekt/webserver/v1/Q';
+
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/query.proto
+
+export const _zoekt_webserver_v1_Type_Kind = {
+ KIND_UNKNOWN_UNSPECIFIED: 'KIND_UNKNOWN_UNSPECIFIED',
+ KIND_FILE_MATCH: 'KIND_FILE_MATCH',
+ KIND_FILE_NAME: 'KIND_FILE_NAME',
+ KIND_REPO: 'KIND_REPO',
+} as const;
+
+export type _zoekt_webserver_v1_Type_Kind =
+ | 'KIND_UNKNOWN_UNSPECIFIED'
+ | 0
+ | 'KIND_FILE_MATCH'
+ | 1
+ | 'KIND_FILE_NAME'
+ | 2
+ | 'KIND_REPO'
+ | 3
+
+export type _zoekt_webserver_v1_Type_Kind__Output = typeof _zoekt_webserver_v1_Type_Kind[keyof typeof _zoekt_webserver_v1_Type_Kind]
+
+/**
+ * Type changes the result type returned.
+ */
+export interface Type {
+ 'child'?: (_zoekt_webserver_v1_Q | null);
+ /**
+ * TODO: type constants
+ */
+ 'type'?: (_zoekt_webserver_v1_Type_Kind);
+}
+
+/**
+ * Type changes the result type returned.
+ */
+export interface Type__Output {
+ 'child': (_zoekt_webserver_v1_Q__Output | null);
+ /**
+ * TODO: type constants
+ */
+ 'type': (_zoekt_webserver_v1_Type_Kind__Output);
+}
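Type wraps a child query and changes what kind of result is returned; Kind follows the same const-object-plus-union pattern as the other generated enums. A minimal sketch of asking for file-name results only:

```typescript
import type { Q } from './Q';
import { _zoekt_webserver_v1_Type_Kind as Kind } from './Type';

// Return only matching file names for a content substring query.
const fileNamesOnly: Q = {
    type: {
        child: { substring: { pattern: 'TODO', content: true } },
        type: Kind.KIND_FILE_NAME,
    },
};
```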
diff --git a/packages/web/src/proto/zoekt/webserver/v1/WebserverService.ts b/packages/web/src/proto/zoekt/webserver/v1/WebserverService.ts
new file mode 100644
index 000000000..c7c80eab5
--- /dev/null
+++ b/packages/web/src/proto/zoekt/webserver/v1/WebserverService.ts
@@ -0,0 +1,63 @@
+// Original file: ../../vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto
+
+import type * as grpc from '@grpc/grpc-js'
+import type { MethodDefinition } from '@grpc/proto-loader'
+import type { ListRequest as _zoekt_webserver_v1_ListRequest, ListRequest__Output as _zoekt_webserver_v1_ListRequest__Output } from '../../../zoekt/webserver/v1/ListRequest';
+import type { ListResponse as _zoekt_webserver_v1_ListResponse, ListResponse__Output as _zoekt_webserver_v1_ListResponse__Output } from '../../../zoekt/webserver/v1/ListResponse';
+import type { SearchRequest as _zoekt_webserver_v1_SearchRequest, SearchRequest__Output as _zoekt_webserver_v1_SearchRequest__Output } from '../../../zoekt/webserver/v1/SearchRequest';
+import type { SearchResponse as _zoekt_webserver_v1_SearchResponse, SearchResponse__Output as _zoekt_webserver_v1_SearchResponse__Output } from '../../../zoekt/webserver/v1/SearchResponse';
+import type { StreamSearchRequest as _zoekt_webserver_v1_StreamSearchRequest, StreamSearchRequest__Output as _zoekt_webserver_v1_StreamSearchRequest__Output } from '../../../zoekt/webserver/v1/StreamSearchRequest';
+import type { StreamSearchResponse as _zoekt_webserver_v1_StreamSearchResponse, StreamSearchResponse__Output as _zoekt_webserver_v1_StreamSearchResponse__Output } from '../../../zoekt/webserver/v1/StreamSearchResponse';
+
+export interface WebserverServiceClient extends grpc.Client {
+ /**
+ * List lists repositories. The query `q` can only contain
+ * query.Repo atoms.
+ */
+ List(argument: _zoekt_webserver_v1_ListRequest, metadata: grpc.Metadata, options: grpc.CallOptions, callback: grpc.requestCallback<_zoekt_webserver_v1_ListResponse__Output>): grpc.ClientUnaryCall;
+ List(argument: _zoekt_webserver_v1_ListRequest, metadata: grpc.Metadata, callback: grpc.requestCallback<_zoekt_webserver_v1_ListResponse__Output>): grpc.ClientUnaryCall;
+ List(argument: _zoekt_webserver_v1_ListRequest, options: grpc.CallOptions, callback: grpc.requestCallback<_zoekt_webserver_v1_ListResponse__Output>): grpc.ClientUnaryCall;
+ List(argument: _zoekt_webserver_v1_ListRequest, callback: grpc.requestCallback<_zoekt_webserver_v1_ListResponse__Output>): grpc.ClientUnaryCall;
+ /**
+ * List lists repositories. The query `q` can only contain
+ * query.Repo atoms.
+ */
+ list(argument: _zoekt_webserver_v1_ListRequest, metadata: grpc.Metadata, options: grpc.CallOptions, callback: grpc.requestCallback<_zoekt_webserver_v1_ListResponse__Output>): grpc.ClientUnaryCall;
+ list(argument: _zoekt_webserver_v1_ListRequest, metadata: grpc.Metadata, callback: grpc.requestCallback<_zoekt_webserver_v1_ListResponse__Output>): grpc.ClientUnaryCall;
+ list(argument: _zoekt_webserver_v1_ListRequest, options: grpc.CallOptions, callback: grpc.requestCallback<_zoekt_webserver_v1_ListResponse__Output>): grpc.ClientUnaryCall;
+ list(argument: _zoekt_webserver_v1_ListRequest, callback: grpc.requestCallback<_zoekt_webserver_v1_ListResponse__Output>): grpc.ClientUnaryCall;
+
+ Search(argument: _zoekt_webserver_v1_SearchRequest, metadata: grpc.Metadata, options: grpc.CallOptions, callback: grpc.requestCallback<_zoekt_webserver_v1_SearchResponse__Output>): grpc.ClientUnaryCall;
+ Search(argument: _zoekt_webserver_v1_SearchRequest, metadata: grpc.Metadata, callback: grpc.requestCallback<_zoekt_webserver_v1_SearchResponse__Output>): grpc.ClientUnaryCall;
+ Search(argument: _zoekt_webserver_v1_SearchRequest, options: grpc.CallOptions, callback: grpc.requestCallback<_zoekt_webserver_v1_SearchResponse__Output>): grpc.ClientUnaryCall;
+ Search(argument: _zoekt_webserver_v1_SearchRequest, callback: grpc.requestCallback<_zoekt_webserver_v1_SearchResponse__Output>): grpc.ClientUnaryCall;
+ search(argument: _zoekt_webserver_v1_SearchRequest, metadata: grpc.Metadata, options: grpc.CallOptions, callback: grpc.requestCallback<_zoekt_webserver_v1_SearchResponse__Output>): grpc.ClientUnaryCall;
+ search(argument: _zoekt_webserver_v1_SearchRequest, metadata: grpc.Metadata, callback: grpc.requestCallback<_zoekt_webserver_v1_SearchResponse__Output>): grpc.ClientUnaryCall;
+ search(argument: _zoekt_webserver_v1_SearchRequest, options: grpc.CallOptions, callback: grpc.requestCallback<_zoekt_webserver_v1_SearchResponse__Output>): grpc.ClientUnaryCall;
+ search(argument: _zoekt_webserver_v1_SearchRequest, callback: grpc.requestCallback<_zoekt_webserver_v1_SearchResponse__Output>): grpc.ClientUnaryCall;
+
+ StreamSearch(argument: _zoekt_webserver_v1_StreamSearchRequest, metadata: grpc.Metadata, options?: grpc.CallOptions): grpc.ClientReadableStream<_zoekt_webserver_v1_StreamSearchResponse__Output>;
+ StreamSearch(argument: _zoekt_webserver_v1_StreamSearchRequest, options?: grpc.CallOptions): grpc.ClientReadableStream<_zoekt_webserver_v1_StreamSearchResponse__Output>;
+ streamSearch(argument: _zoekt_webserver_v1_StreamSearchRequest, metadata: grpc.Metadata, options?: grpc.CallOptions): grpc.ClientReadableStream<_zoekt_webserver_v1_StreamSearchResponse__Output>;
+ streamSearch(argument: _zoekt_webserver_v1_StreamSearchRequest, options?: grpc.CallOptions): grpc.ClientReadableStream<_zoekt_webserver_v1_StreamSearchResponse__Output>;
+
+}
+
+export interface WebserverServiceHandlers extends grpc.UntypedServiceImplementation {
+ /**
+ * List lists repositories. The query `q` can only contain
+ * query.Repo atoms.
+ */
+ List: grpc.handleUnaryCall<_zoekt_webserver_v1_ListRequest__Output, _zoekt_webserver_v1_ListResponse>;
+
+ Search: grpc.handleUnaryCall<_zoekt_webserver_v1_SearchRequest__Output, _zoekt_webserver_v1_SearchResponse>;
+
+ StreamSearch: grpc.handleServerStreamingCall<_zoekt_webserver_v1_StreamSearchRequest__Output, _zoekt_webserver_v1_StreamSearchResponse>;
+
+}
+
+export interface WebserverServiceDefinition extends grpc.ServiceDefinition {
+ List: MethodDefinition<_zoekt_webserver_v1_ListRequest, _zoekt_webserver_v1_ListResponse, _zoekt_webserver_v1_ListRequest__Output, _zoekt_webserver_v1_ListResponse__Output>
+ Search: MethodDefinition<_zoekt_webserver_v1_SearchRequest, _zoekt_webserver_v1_SearchResponse, _zoekt_webserver_v1_SearchRequest__Output, _zoekt_webserver_v1_SearchResponse__Output>
+ StreamSearch: MethodDefinition<_zoekt_webserver_v1_StreamSearchRequest, _zoekt_webserver_v1_StreamSearchResponse, _zoekt_webserver_v1_StreamSearchRequest__Output, _zoekt_webserver_v1_StreamSearchResponse__Output>
+}
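These are plain type declarations; the runtime client comes from loading the proto with @grpc/proto-loader and creating a stub with @grpc/grpc-js (both added to the web package in this diff). A minimal sketch of streaming a search, where the proto path, the ProtoGrpcType import, the server address, and the FileMatch field used for logging are assumptions rather than part of this diff:

```typescript
import * as grpc from '@grpc/grpc-js';
import * as protoLoader from '@grpc/proto-loader';
import type { ProtoGrpcType } from './proto/webserver'; // assumed name of the generated root type file
import type { StreamSearchResponse__Output } from './proto/zoekt/webserver/v1/StreamSearchResponse';

// Load the vendored proto definition (path assumed from the "Original file" headers above).
const packageDefinition = protoLoader.loadSync(
    'vendor/zoekt/grpc/protos/zoekt/webserver/v1/webserver.proto',
    { longs: Number, enums: String, defaults: true },
);
const proto = grpc.loadPackageDefinition(packageDefinition) as unknown as ProtoGrpcType;

const client = new proto.zoekt.webserver.v1.WebserverService(
    'localhost:6070', // assumed zoekt-webserver gRPC address
    grpc.credentials.createInsecure(),
);

// Stream results for a simple content substring query.
const stream = client.StreamSearch({
    request: {
        query: { substring: { pattern: 'hello', content: true } },
        opts: { chunk_matches: true, num_context_lines: 1 },
    },
});

stream.on('data', (chunk: StreamSearchResponse__Output) => {
    for (const file of chunk.response_chunk?.files ?? []) {
        console.log(file.file_name); // field name assumed from zoekt's FileMatch message
    }
});
stream.on('error', (err) => console.error(err));
stream.on('end', () => console.log('search complete'));
```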
diff --git a/packages/web/src/withAuthV2.ts b/packages/web/src/withAuthV2.ts
index 1b0555334..f1e229625 100644
--- a/packages/web/src/withAuthV2.ts
+++ b/packages/web/src/withAuthV2.ts
@@ -1,6 +1,6 @@
import { prisma as __unsafePrisma, userScopedPrismaClientExtension } from "@/prisma";
import { hashSecret } from "@sourcebot/shared";
-import { ApiKey, Org, OrgRole, PrismaClient, User } from "@sourcebot/db";
+import { ApiKey, Org, OrgRole, PrismaClient, UserWithAccounts } from "@sourcebot/db";
import { headers } from "next/headers";
import { auth } from "./auth";
import { notAuthenticated, notFound, ServiceError } from "./lib/serviceError";
@@ -11,14 +11,14 @@ import { getOrgMetadata, isServiceError } from "./lib/utils";
import { hasEntitlement } from "@sourcebot/shared";
interface OptionalAuthContext {
- user?: User;
+ user?: UserWithAccounts;
org: Org;
role: OrgRole;
prisma: PrismaClient;
}
interface RequiredAuthContext {
- user: User;
+ user: UserWithAccounts;
org: Org;
role: Exclude;
prisma: PrismaClient;
@@ -88,8 +88,7 @@ export const getAuthContext = async (): Promise
-    const accountIds = user?.accounts.map(account => account.id);
-    const prisma = __unsafePrisma.$extends(userScopedPrismaClientExtension(accountIds)) as PrismaClient;
+    const prisma = __unsafePrisma.$extends(userScopedPrismaClientExtension(user)) as PrismaClient;
return {
user: user ?? undefined,
diff --git a/yarn.lock b/yarn.lock
index f103fe788..41e3339fe 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2338,6 +2338,16 @@ __metadata:
languageName: node
linkType: hard
+"@grpc/grpc-js@npm:^1.14.1":
+ version: 1.14.1
+ resolution: "@grpc/grpc-js@npm:1.14.1"
+ dependencies:
+ "@grpc/proto-loader": "npm:^0.8.0"
+ "@js-sdsl/ordered-map": "npm:^4.4.2"
+ checksum: 10c0/a9a8fc7f4dfa374a34e37350b37ad2c092ed533b203fe16d45ba3220fe38195d17a87527dade2e5546afeeeccfcf68d3e914705d94e44e8df461321b0c02cc7a
+ languageName: node
+ linkType: hard
+
"@grpc/proto-loader@npm:^0.8.0":
version: 0.8.0
resolution: "@grpc/proto-loader@npm:0.8.0"
@@ -2901,17 +2911,10 @@ __metadata:
languageName: node
linkType: hard
-"@lezer/common@npm:^0.15.0, @lezer/common@npm:^0.15.5":
- version: 0.15.12
- resolution: "@lezer/common@npm:0.15.12"
- checksum: 10c0/ed9cbeeee81e4da94a4fba735caddb6aa5d1908a4b2a95737b35492a2b0ec220be792ef7b2d302914904b3d9fa7307f43e8b627973e10b04cd1c1a88e5647622
- languageName: node
- linkType: hard
-
-"@lezer/common@npm:^1.0.0, @lezer/common@npm:^1.0.2, @lezer/common@npm:^1.0.3, @lezer/common@npm:^1.1.0, @lezer/common@npm:^1.2.0, @lezer/common@npm:^1.2.1":
- version: 1.2.3
- resolution: "@lezer/common@npm:1.2.3"
- checksum: 10c0/fe9f8e111080ef94037a34ca2af1221c8d01c1763ba5ecf708a286185c76119509a5d19d924c8842172716716ddce22d7834394670c4a9432f0ba9f3b7c0f50d
+"@lezer/common@npm:1.3.0":
+ version: 1.3.0
+ resolution: "@lezer/common@npm:1.3.0"
+ checksum: 10c0/e164094920761c2f56c8634d0ae9261ea7c5e6b8202aa08773febc59b8d8284dde5bc7a810c9438e27b978e5ad67d0db03af1ed72924df61b8fa2704acb55deb
languageName: node
linkType: hard
@@ -2937,6 +2940,18 @@ __metadata:
languageName: node
linkType: hard
+"@lezer/generator@npm:^1.8.0":
+ version: 1.8.0
+ resolution: "@lezer/generator@npm:1.8.0"
+ dependencies:
+ "@lezer/common": "npm:^1.1.0"
+ "@lezer/lr": "npm:^1.3.0"
+ bin:
+ lezer-generator: src/lezer-generator.cjs
+ checksum: 10c0/c9dab9a27b6b757544f51b1612842ded77db7322d23cfd175274f89d783e0987b106c0f51e1203af74b7e56ccc567e8efd633aaffa2086cb55bfc1e3ea591fa6
+ languageName: node
+ linkType: hard
+
"@lezer/go@npm:^1.0.0":
version: 1.0.0
resolution: "@lezer/go@npm:1.0.0"
@@ -3019,6 +3034,15 @@ __metadata:
languageName: node
linkType: hard
+"@lezer/lr@npm:^1.4.3":
+ version: 1.4.3
+ resolution: "@lezer/lr@npm:1.4.3"
+ dependencies:
+ "@lezer/common": "npm:^1.0.0"
+ checksum: 10c0/3c9fd7eefb0641addfdd0955b4c4014bb8702285c52890b58c937d766320ba2fec8c6b374b46f514079a093c9dd21b6632746a01fed16c250c90d649e5dd12c1
+ languageName: node
+ linkType: hard
+
"@lezer/markdown@npm:^1.0.0":
version: 1.4.2
resolution: "@lezer/markdown@npm:1.4.2"
@@ -7978,6 +8002,19 @@ __metadata:
languageName: unknown
linkType: soft
+"@sourcebot/query-language@workspace:*, @sourcebot/query-language@workspace:packages/queryLanguage":
+ version: 0.0.0-use.local
+ resolution: "@sourcebot/query-language@workspace:packages/queryLanguage"
+ dependencies:
+ "@lezer/common": "npm:^1.3.0"
+ "@lezer/generator": "npm:^1.8.0"
+ "@lezer/lr": "npm:^1.4.3"
+ tsx: "npm:^4.19.1"
+ typescript: "npm:^5.7.3"
+ vitest: "npm:^2.1.9"
+ languageName: unknown
+ linkType: soft
+
"@sourcebot/schemas@workspace:*, @sourcebot/schemas@workspace:packages/schemas":
version: 0.0.0-use.local
resolution: "@sourcebot/schemas@workspace:packages/schemas"
@@ -8060,6 +8097,8 @@ __metadata:
"@codemirror/view": "npm:^6.33.0"
"@eslint/eslintrc": "npm:^3"
"@floating-ui/react": "npm:^0.27.2"
+ "@grpc/grpc-js": "npm:^1.14.1"
+ "@grpc/proto-loader": "npm:^0.8.0"
"@hookform/resolvers": "npm:^3.9.0"
"@iconify/react": "npm:^5.1.0"
"@iizukak/codemirror-lang-wgsl": "npm:^0.3.0"
@@ -8099,6 +8138,7 @@ __metadata:
"@shopify/lang-jsonc": "npm:^1.0.0"
"@sourcebot/codemirror-lang-tcl": "npm:^1.0.12"
"@sourcebot/db": "workspace:*"
+ "@sourcebot/query-language": "workspace:*"
"@sourcebot/schemas": "workspace:*"
"@sourcebot/shared": "workspace:*"
"@ssddanbrown/codemirror-lang-twig": "npm:^1.0.0"