diff --git a/internal/cache/handlers.go b/internal/cache/handlers.go index 44fb396..2cefda4 100644 --- a/internal/cache/handlers.go +++ b/internal/cache/handlers.go @@ -19,6 +19,7 @@ import ( "gopkg.in/yaml.v3" "github.com/dependabot/proxy/internal/ctxdata" + "github.com/dependabot/proxy/internal/gitproto" ) // DB contains the metadata of the disk cache @@ -97,8 +98,12 @@ func key(r *http.Request) Key { k.HeaderHash = hex.EncodeToString(headerHash.Sum(nil)) } if len(data) > 0 { + hashData := data + if gitproto.IsUploadPackRequest(r) { + hashData = gitproto.NormalizeUploadPackBody(data) + } hash := sha256.New() - hash.Write(data) + hash.Write(hashData) k.BodyHash = hex.EncodeToString(hash.Sum(nil)) } return k diff --git a/internal/cache/handlers_test.go b/internal/cache/handlers_test.go index 1bc65fe..a722543 100644 --- a/internal/cache/handlers_test.go +++ b/internal/cache/handlers_test.go @@ -245,6 +245,62 @@ func Test_key(t *testing.T) { t.Error("headerHash should be blank, got", key.HeaderHash) } }) + + // Integration tests for the gitproto hookup. Edge-case behaviour of the + // normalizer itself lives in internal/gitproto. 
+ const upUrl = "https://github.com/octocat/Hello-World.git/git-upload-pack" + const upCT = "application/x-git-upload-pack-request" + mkUpReq := func(url, ct, body string) *http.Request { + r := httptest.NewRequest("POST", url, strings.NewReader(body)) + if ct != "" { + r.Header.Set("Content-Type", ct) + } + return r + } + + t.Run("git-upload-pack: agent= drift collapses to one key", func(t *testing.T) { + body1 := "0080want 7fd1a60b01f91b314f59955a4e4d4e80d8edf11d multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.43.0\n" + + "0032have 553c2077f0edc3d5dc5d17262f6aa498e69d6f8e\n0009done\n" + body2 := "0080want 7fd1a60b01f91b314f59955a4e4d4e80d8edf11d multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.53.0\n" + + "0032have 553c2077f0edc3d5dc5d17262f6aa498e69d6f8e\n0009done\n" + if key(mkUpReq(upUrl, upCT, body1)) != key(mkUpReq(upUrl, upCT, body2)) { + t.Error("agent-only difference must collapse") + } + }) + + t.Run("git-upload-pack: different haves hash distinctly", func(t *testing.T) { + body1 := "0032want 7fd1a60b01f91b314f59955a4e4d4e80d8edf11d\n0000" + + "0032have 553c2077f0edc3d5dc5d17262f6aa498e69d6f8e\n0009done\n" + body2 := "0032want 7fd1a60b01f91b314f59955a4e4d4e80d8edf11d\n0000" + + "0032have a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2\n0009done\n" + if key(mkUpReq(upUrl, upCT, body1)) == key(mkUpReq(upUrl, upCT, body2)) { + t.Error("haves shape the upstream pack and must not collapse") + } + }) + + t.Run("git-upload-pack: malformed body falls back to raw hashing", func(t *testing.T) { + if key(mkUpReq(upUrl, upCT, "garbage one")) == key(mkUpReq(upUrl, upCT, "garbage two")) { + t.Error("malformed bodies must hash distinctly") + } + }) + + t.Run("non-git POST is not normalized even with similar substrings", func(t *testing.T) { + const u = "https://api.github.com/graphql" + k1 := key(httptest.NewRequest("POST", u, strings.NewReader(`{"q":"have stuff agent=foo"}`))) + k2 := key(httptest.NewRequest("POST", u, 
strings.NewReader(`{"q":"have other agent=bar"}`))) + if k1 == k2 { + t.Error("non-git POSTs must not be normalized") + } + }) + + t.Run("upload-pack path without Content-Type is not normalized", func(t *testing.T) { + const u = "https://example.com/foo/git-upload-pack" + body1 := "0032have 553c2077f0edc3d5dc5d17262f6aa498e69d6f8e\n0009done\n" + body2 := "0032have a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2\n0009done\n" + if key(mkUpReq(u, "", body1)) == key(mkUpReq(u, "", body2)) { + t.Error("missing Content-Type must skip normalization") + } + }) } type BufferWithClose struct { diff --git a/internal/gitproto/pktline.go b/internal/gitproto/pktline.go new file mode 100644 index 0000000..ba00b98 --- /dev/null +++ b/internal/gitproto/pktline.go @@ -0,0 +1,118 @@ +package gitproto + +// pktType is the kind of a single pkt-line: either a data packet or one of +// the three special framing packets defined by git's smart-HTTP protocol. +// +// Each pkt-line on the wire begins with a 4-hex-digit length that includes +// itself, or is one of: "0000" flush, "0001" delim (v2), "0002" response-end. +// Any length >= 4 is a data packet whose payload is (length - 4) bytes. +// See https://git-scm.com/docs/protocol-common#_pkt_line_format. +type pktType int + +const ( + pktData pktType = iota + pktFlush + pktDelim + pktResponseEnd +) + +// packet is one parsed pkt-line. payload is set only when typ == pktData and +// excludes the 4-byte length prefix. +type packet struct { + typ pktType + payload []byte +} + +const hexDigits = "0123456789abcdef" + +// parseHex4 decodes a 4-byte ASCII hex prefix without allocating a string. 
+func parseHex4(b []byte) (n int, ok bool) { + for i := 0; i < 4; i++ { + c := b[i] + var v int + switch { + case c >= '0' && c <= '9': + v = int(c - '0') + case c >= 'a' && c <= 'f': + v = int(c-'a') + 10 + case c >= 'A' && c <= 'F': + v = int(c-'A') + 10 + default: + return 0, false + } + n = n<<4 | v + } + return n, true +} + +// parsePktLine returns ok=false on malformed or truncated input so callers +// can fall back to opaque hashing of the original bytes. +func parsePktLine(data []byte) (packets []packet, ok bool) { + for len(data) > 0 { + if len(data) < 4 { + return nil, false + } + n, ok := parseHex4(data[:4]) + if !ok { + return nil, false + } + switch n { + case 0: + packets = append(packets, packet{typ: pktFlush}) + data = data[4:] + case 1: + packets = append(packets, packet{typ: pktDelim}) + data = data[4:] + case 2: + packets = append(packets, packet{typ: pktResponseEnd}) + data = data[4:] + case 3: + // Reserved; not used by real git. Treat as malformed. + return nil, false + default: + if n > len(data) { + return nil, false + } + packets = append(packets, packet{typ: pktData, payload: data[4:n]}) + data = data[n:] + } + } + return packets, true +} + +// encodePktLine recomputes each data packet's length prefix, which is what +// makes normalization stable across payloads of differing length. +func encodePktLine(packets []packet) []byte { + buf := make([]byte, 0, encodedSize(packets)) + for _, p := range packets { + switch p.typ { + case pktFlush: + buf = append(buf, "0000"...) + case pktDelim: + buf = append(buf, "0001"...) + case pktResponseEnd: + buf = append(buf, "0002"...) + case pktData: + n := 4 + len(p.payload) + buf = append(buf, + hexDigits[(n>>12)&0xf], + hexDigits[(n>>8)&0xf], + hexDigits[(n>>4)&0xf], + hexDigits[n&0xf], + ) + buf = append(buf, p.payload...) 
+ } + } + return buf +} + +func encodedSize(packets []packet) int { + size := 0 + for _, p := range packets { + size += 4 + if p.typ == pktData { + size += len(p.payload) + } + } + return size +} diff --git a/internal/gitproto/pktline_test.go b/internal/gitproto/pktline_test.go new file mode 100644 index 0000000..ed0de08 --- /dev/null +++ b/internal/gitproto/pktline_test.go @@ -0,0 +1,107 @@ +package gitproto + +import ( + "bytes" + "testing" +) + +func TestParsePktLine_Empty(t *testing.T) { + pkts, ok := parsePktLine(nil) + if !ok { + t.Error("expected ok=true for empty input") + } + if len(pkts) != 0 { + t.Fatalf("expected 0 packets, got %d", len(pkts)) + } +} + +func TestParsePktLine_SpecialPackets(t *testing.T) { + cases := map[string]pktType{ + "0000": pktFlush, + "0001": pktDelim, + "0002": pktResponseEnd, + } + for input, want := range cases { + pkts, ok := parsePktLine([]byte(input)) + if !ok || len(pkts) != 1 || pkts[0].typ != want { + t.Errorf("input %q: got %+v ok=%v, want type %d", input, pkts, ok, want) + } + } +} + +func TestParsePktLine_DataPacket(t *testing.T) { + // "000ahello\n" = length 0x000a (10), payload "hello\n" + pkts, ok := parsePktLine([]byte("000ahello\n")) + if !ok || len(pkts) != 1 || pkts[0].typ != pktData || string(pkts[0].payload) != "hello\n" { + t.Errorf("got %+v ok=%v", pkts, ok) + } +} + +func TestParsePktLine_MalformedAndTruncated(t *testing.T) { + // Bad hex prefix. + if _, ok := parsePktLine([]byte("gggghi")); ok { + t.Error("expected ok=false for malformed length prefix") + } + // Length claims 0x0020 but only 9 bytes available. + if _, ok := parsePktLine([]byte("0020short")); ok { + t.Error("expected ok=false for truncated packet") + } + // Length 3 is reserved; we treat as malformed. + if _, ok := parsePktLine([]byte("00030000")); ok { + t.Error("expected ok=false for reserved length 3") + } + // Less than 4 bytes. 
+ if _, ok := parsePktLine([]byte("ab")); ok { + t.Error("expected ok=false for sub-prefix input") + } +} + +func TestParsePktLine_RealV1Body(t *testing.T) { + // Realistic v1 upload-pack body from github.com/octocat/Hello-World + input := "00a4want 7fd1a60b01f91b314f59955a4e4d4e80d8edf11d multi_ack_detailed no-done side-band-64k thin-pack no-progress ofs-delta deepen-since deepen-not agent=git/2.43.0\n" + + "0032want b1b3f9723831141a31a1a7252a213e216ea76e56\n" + + "0000" + + "0032have 553c2077f0edc3d5dc5d17262f6aa498e69d6f8e\n" + + "0009done\n" + pkts, ok := parsePktLine([]byte(input)) + if !ok { + t.Fatal("expected ok=true for well-formed v1 body") + } + wantTypes := []pktType{pktData, pktData, pktFlush, pktData, pktData} + if len(pkts) != len(wantTypes) { + t.Fatalf("got %d packets, want %d", len(pkts), len(wantTypes)) + } + for i, want := range wantTypes { + if pkts[i].typ != want { + t.Errorf("packet %d: got type %d, want %d", i, pkts[i].typ, want) + } + } +} + +func TestParsePktLine_RealV2Body(t *testing.T) { + input := "0012command=fetch\n" + + "0015agent=git/2.43.0\n" + + "0001" + + "000ddeepen 1\n" + + "0032want 7fd1a60b01f91b314f59955a4e4d4e80d8edf11d\n" + + "0009done\n" + + "0000" + pkts, ok := parsePktLine([]byte(input)) + if !ok || len(pkts) != 7 { + t.Fatalf("got %d packets ok=%v, want 7 ok=true", len(pkts), ok) + } + if pkts[2].typ != pktDelim || pkts[6].typ != pktFlush { + t.Error("special packets misidentified") + } +} + +func TestEncodePktLine_RoundTrip(t *testing.T) { + input := []byte("000ahello\n" + "0000" + "0001" + "000aworld\n" + "0002") + pkts, ok := parsePktLine(input) + if !ok { + t.Fatal("parse failed on well-formed input") + } + if got := encodePktLine(pkts); !bytes.Equal(got, input) { + t.Errorf("round-trip mismatch:\n in: %q\n out: %q", input, got) + } +} diff --git a/internal/gitproto/upload_pack.go b/internal/gitproto/upload_pack.go new file mode 100644 index 0000000..2af9590 --- /dev/null +++ b/internal/gitproto/upload_pack.go @@ 
-0,0 +1,96 @@ +// Package gitproto provides helpers for stabilizing git smart-HTTP cache keys. +// +// Only IsUploadPackRequest and NormalizeUploadPackBody are exported; the +// pkt-line framing parser is an implementation detail. +package gitproto + +import ( + "bytes" + "mime" + "net/http" + "regexp" + "strings" +) + +// uploadPackContentType is the media type real git clients send on a fetch. +// See https://git-scm.com/docs/http-protocol. +const uploadPackContentType = "application/x-git-upload-pack-request" + +// inlineVolatileTokenRegex matches " agent=…" / " session-id=…" tokens +// trailing a v1 capability list. The leading space anchors the match so a +// payload that merely starts with the same text is not affected. +var inlineVolatileTokenRegex = regexp.MustCompile(` (?:agent|session-id)=[^ \r\n]*`) + +// volatileStandalonePrefixes lists payload prefixes whose entire pkt-line is +// dropped: per-process v2 capability lines that don't influence the pack. +// +// "have" lines are intentionally NOT here — they drive object negotiation and +// the upstream response depends on them. +var volatileStandalonePrefixes = [][]byte{[]byte("agent="), []byte("session-id=")} + +// hasVolatilePrefix reports whether payload begins with any prefix in +// volatileStandalonePrefixes. +func hasVolatilePrefix(payload []byte) bool { + for _, prefix := range volatileStandalonePrefixes { + if bytes.HasPrefix(payload, prefix) { + return true + } + } + return false +} + +// IsUploadPackRequest reports whether r is a smart-HTTP git-upload-pack POST. +// All three of method, path suffix, and Content-Type must match so that an +// unrelated POST sharing the URL suffix isn't routed through normalization. 
+func IsUploadPackRequest(r *http.Request) bool { + if r.Method != http.MethodPost { + return false + } + if !strings.HasSuffix(r.URL.Path, "/git-upload-pack") { + return false + } + // mime.ParseMediaType handles RFC 7231 case-insensitivity and parameter + // whitespace; we only care about the canonical media type. + mediaType, _, err := mime.ParseMediaType(r.Header.Get("Content-Type")) + return err == nil && mediaType == uploadPackContentType +} + +// NormalizeUploadPackBody returns a stable cache-key input derived from a +// git-upload-pack POST body. The output is hash input only — never sent on +// the wire. +// +// Stripped (per-process noise that doesn't shape the pack): +// - standalone "agent=" / "session-id=" pkt-lines (v2 capabilities) +// - inline " agent=" / " session-id=" tokens on v1 want lines +// +// Preserved (everything that can change the upstream response): wants, haves, +// capabilities, command=, deepen/shallow, filter, ref-prefix, object-format, +// and all framing packets. Re-encoding recomputes pkt-line length prefixes, +// so requests differing only in a stripped value's length still hash equal. +// +// Malformed input is returned unchanged so callers fall back to opaque +// hashing (cache miss is acceptable; collision is not). +func NormalizeUploadPackBody(data []byte) []byte { + packets, ok := parsePktLine(data) + if !ok { + return data + } + filtered := packets[:0] + for _, p := range packets { + if p.typ != pktData { + filtered = append(filtered, p) + continue + } + if hasVolatilePrefix(p.payload) { + continue + } + // Match-then-Replace skips the alloc on the common no-match path. 
+ if inlineVolatileTokenRegex.Match(p.payload) { + cleaned := inlineVolatileTokenRegex.ReplaceAll(p.payload, nil) + filtered = append(filtered, packet{typ: pktData, payload: cleaned}) + continue + } + filtered = append(filtered, p) + } + return encodePktLine(filtered) +} diff --git a/internal/gitproto/upload_pack_test.go b/internal/gitproto/upload_pack_test.go new file mode 100644 index 0000000..a9bf038 --- /dev/null +++ b/internal/gitproto/upload_pack_test.go @@ -0,0 +1,177 @@ +package gitproto + +import ( + "bytes" + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +func normalize(body string) string { + return string(NormalizeUploadPackBody([]byte(body))) +} + +func TestIsUploadPackRequest(t *testing.T) { + const ct = "application/x-git-upload-pack-request" + cases := []struct { + name string + method string + url string + contentType string + want bool + }{ + {"real git POST", http.MethodPost, "https://github.com/octocat/Hello-World.git/git-upload-pack", ct, true}, + {"real git POST with charset parameter", http.MethodPost, "https://github.com/octocat/Hello-World.git/git-upload-pack", ct + "; charset=utf-8", true}, + {"uppercase media type (RFC 7231 case-insensitive)", http.MethodPost, "https://github.com/octocat/Hello-World.git/git-upload-pack", "Application/X-Git-Upload-Pack-Request", true}, + {"extra whitespace around parameter", http.MethodPost, "https://github.com/octocat/Hello-World.git/git-upload-pack", ct + " ; charset=utf-8", true}, + {"GET to upload-pack URL", http.MethodGet, "https://github.com/octocat/Hello-World.git/git-upload-pack", ct, false}, + {"POST to other git path (info/refs)", http.MethodPost, "https://github.com/octocat/Hello-World.git/info/refs", ct, false}, + {"POST to non-git path", http.MethodPost, "https://api.github.com/graphql", "application/json", false}, + {"fake upload-pack path with wrong Content-Type", http.MethodPost, "https://example.com/foo/git-upload-pack", "application/json", false}, + {"upload-pack path with 
no Content-Type", http.MethodPost, "https://github.com/octocat/Hello-World.git/git-upload-pack", "", false}, + {"path ends in git-upload-pack but no leading slash", http.MethodPost, "https://example.com/notgit-upload-pack", ct, false}, + {"path has trailing segment after git-upload-pack", http.MethodPost, "https://github.com/foo.git/git-upload-pack/extra", ct, false}, + } + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + req := httptest.NewRequest(tc.method, tc.url, nil) + if tc.contentType != "" { + req.Header.Set("Content-Type", tc.contentType) + } + if got := IsUploadPackRequest(req); got != tc.want { + t.Errorf("got %v, want %v", got, tc.want) + } + }) + } +} + +// Realistic OIDs and pkt-line lengths captured from github.com/octocat/Hello-World. +const ( + oidA = "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d" + oidB = "b1b3f9723831141a31a1a7252a213e216ea76e56" + oidC = "b3cbd5bbd7e81436d2eee04537ea2b4c0cad4cdf" + oidH1 = "553c2077f0edc3d5dc5d17262f6aa498e69d6f8e" + oidH2 = "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2" +) + +func TestNormalizeUploadPackBody(t *testing.T) { + t.Run("agent= drift collapses; haves preserved", func(t *testing.T) { + body1 := "00a4want " + oidA + " multi_ack_detailed no-done side-band-64k thin-pack no-progress ofs-delta deepen-since deepen-not agent=git/2.43.0\n" + + "0032want " + oidB + "\n0000" + + "0032have " + oidH1 + "\n0009done\n" + body2 := "00a3want " + oidA + " multi_ack_detailed no-done side-band-64k thin-pack no-progress ofs-delta deepen-since deepen-not agent=git/2.9.5\n" + + "0032want " + oidB + "\n0000" + + "0032have " + oidH1 + "\n0009done\n" + if normalize(body1) != normalize(body2) { + t.Errorf("normalized bodies differ:\n %q\n %q", normalize(body1), normalize(body2)) + } + }) + + t.Run("different haves do not collide", func(t *testing.T) { + body1 := "0032want " + oidA + "\n0000" + "0032have " + oidH1 + "\n0009done\n" + body2 := "0032want " + oidA + "\n0000" + "0032have " + oidH2 + "\n0009done\n" + if 
normalize(body1) == normalize(body2) { + t.Error("haves drive the upstream pack and must not collapse") + } + }) + + t.Run("different wants do not collide", func(t *testing.T) { + body1 := "0032want " + oidA + "\n0009done\n" + body2 := "0032want " + oidC + "\n0009done\n" + if normalize(body1) == normalize(body2) { + t.Error("different wants must not collide") + } + }) + + t.Run("response-shaping fields stay distinct", func(t *testing.T) { + bodies := map[string]string{ + "plain": "0032want " + oidA + "\n0009done\n", + "shallow": "000ddeepen 1\n0032want " + oidA + "\n0009done\n", + "shallow-deeper": "000ddeepen 2\n0032want " + oidA + "\n0009done\n", + "filter-blobless": "0015filter blob:none\n0032want " + oidA + "\n0009done\n", + "filter-treeless": "0012filter tree:0\n0032want " + oidA + "\n0009done\n", + "thin-pack-on": "0080want " + oidA + " multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.43.0\n0009done\n", + "thin-pack-off": "0076want " + oidA + " multi_ack_detailed no-done side-band-64k ofs-delta agent=git/2.43.0\n0009done\n", + } + seen := make(map[string]string) + for name, body := range bodies { + n := normalize(body) + if other, ok := seen[n]; ok { + t.Errorf("collision: %q == %q", name, other) + } + seen[n] = name + } + }) + + t.Run("v2 ls-refs ref-prefix is preserved", func(t *testing.T) { + body1 := "0014command=ls-refs\n0015agent=git/2.43.0\n001bref-prefix refs/heads/\n0000" + body2 := "0014command=ls-refs\n0015agent=git/2.43.0\n001aref-prefix refs/tags/\n0000" + if normalize(body1) == normalize(body2) { + t.Error("different ref-prefix must not collide") + } + }) + + t.Run("v2 fetch agent drift collapses", func(t *testing.T) { + body1 := "0012command=fetch\n001bagent=git/2.43.0-Linux\n0001000ddeepen 1\n0032want " + oidA + "\n0009done\n0000" + body2 := "0012command=fetch\n001bagent=git/2.53.0-Linux\n0001000ddeepen 1\n0032want " + oidA + "\n0009done\n0000" + if normalize(body1) != normalize(body2) { + t.Error("v2 agent drift must 
not affect normalization") + } + }) + + t.Run("v1 inline agent stripped, other capabilities kept", func(t *testing.T) { + body1 := "0080want " + oidA + " multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.43.0\n0009done\n" + body2 := "0080want " + oidA + " multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.53.0\n0009done\n" + got := normalize(body1) + if got != normalize(body2) { + t.Errorf("agent-only difference must collapse:\n %q\n %q", got, normalize(body2)) + } + if !strings.Contains(got, "thin-pack") || strings.Contains(got, "agent=") { + t.Errorf("got %q", got) + } + }) + + t.Run("malformed body returned unchanged", func(t *testing.T) { + body := []byte("this is not pkt-line data") + if got := NormalizeUploadPackBody(body); !bytes.Equal(got, body) { + t.Errorf("got %q", got) + } + }) + + t.Run("empty body produces empty output", func(t *testing.T) { + if got := NormalizeUploadPackBody(nil); len(got) != 0 { + t.Errorf("got %q", got) + } + }) + + // session-id is a per-invocation UUID added in Git 2.36 (April 2022). 
+ t.Run("v2 standalone session-id is stripped", func(t *testing.T) { + body1 := "0012command=fetch\n0015agent=git/2.43.0\n0016session-id=abcdef\n00010032want " + oidA + "\n0009done\n0000" + body2 := "0012command=fetch\n0015agent=git/2.43.0\n0016session-id=fedcba\n00010032want " + oidA + "\n0009done\n0000" + got := normalize(body1) + if got != normalize(body2) || strings.Contains(got, "session-id=") { + t.Errorf("got %q", got) + } + }) + + t.Run("v1 inline session-id stripped alongside agent", func(t *testing.T) { + body1 := "0091want " + oidA + " multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.43.0 session-id=aaa-1\n0009done\n" + body2 := "0091want " + oidA + " multi_ack_detailed no-done side-band-64k thin-pack ofs-delta agent=git/2.53.0 session-id=zzz-2\n0009done\n" + got := normalize(body1) + if got != normalize(body2) || + strings.Contains(got, "session-id=") || strings.Contains(got, "agent=") || + !strings.Contains(got, "thin-pack") { + t.Errorf("got %q", got) + } + }) + + // Prefix matching is exact: substrings of stripped tokens must survive. + t.Run("'haven' and 'session-ids' prefixes are not stripped", func(t *testing.T) { + body := "000ehaven foo\n0015session-ids=keep\n0009done\n" + got := normalize(body) + if !strings.Contains(got, "haven foo") || !strings.Contains(got, "session-ids=keep") { + t.Errorf("got %q", got) + } + }) +}