75 changes: 13 additions & 62 deletions lib/cmd_export.go
@@ -1,8 +1,6 @@
package lib

import (
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"os"
@@ -78,7 +76,7 @@ func CmdExport(f CmdExportFlags, args []string, printHelp func()) error {
defer outFile.Close()
}

// validate format.
// infer format from extension if not specified.
if f.Format == "" {
if strings.HasSuffix(f.Out, ".csv") {
f.Format = "csv"
@@ -90,9 +88,6 @@ func CmdExport(f CmdExportFlags, args []string, printHelp func()) error {
f.Format = "csv"
}
}
if f.Format != "csv" && f.Format != "tsv" && f.Format != "json" {
return errors.New("format must be \"csv\" or \"tsv\" or \"json\"")
}

// open tree.
db, err := maxminddb.Open(args[0])
@@ -101,61 +96,17 @@ func CmdExport(f CmdExportFlags, args []string, printHelp func()) error {
}
defer db.Close()

if f.Format == "tsv" || f.Format == "csv" {
// export.
hdrWritten := false
var wr writer
if f.Format == "csv" {
csvwr := csv.NewWriter(outFile)
wr = csvwr
} else {
tsvwr := NewTsvWriter(outFile)
wr = tsvwr
}
for result := range db.Networks() {
record := make(map[string]interface{})
if err := result.Decode(&record); err != nil {
return fmt.Errorf("failed to get record for next subnet: %w", err)
}
subnet := result.Prefix()

recordStr := mapInterfaceToStr(record)
if !hdrWritten {
hdrWritten = true

if !f.NoHdr {
hdr := append([]string{"range"}, sortedMapKeys(recordStr)...)
if err := wr.Write(hdr); err != nil {
return fmt.Errorf(
"failed to write header %v: %w",
hdr, err,
)
}
}
}

line := append(
[]string{subnet.String()},
sortedMapValsByKeys(recordStr)...,
)
if err := wr.Write(line); err != nil {
return fmt.Errorf("failed to write line %v: %w", line, err)
}
}
wr.Flush()
if err := wr.Error(); err != nil {
return fmt.Errorf("writer had failure: %w", err)
}
} else if f.Format == "json" {
enc := json.NewEncoder(outFile)
for result := range db.Networks() {
record := make(map[string]interface{})
if err := result.Decode(&record); err != nil {
return fmt.Errorf("failed to get record for next subnet: %w", err)
}
record["range"] = result.Prefix().String()
enc.Encode(record)
}
var exp exporter
switch f.Format {
case "csv":
exp = newCSVExporter(outFile, f.NoHdr)
case "tsv":
exp = newTSVExporter(outFile, f.NoHdr)
case "json":
exp = newJSONExporter(outFile)
default:
return errors.New("format must be \"csv\" or \"tsv\" or \"json\"")
}
return nil

return exportNetworks(db, exp)
}
26 changes: 26 additions & 0 deletions lib/exporter.go
@@ -0,0 +1,26 @@
package lib

import (
"fmt"

"github.com/oschwald/maxminddb-golang/v2"
)

// exporter defines the interface for exporting MMDB records.
type exporter interface {
WriteRecord(result maxminddb.Result) error
Flush() error
}

// exportNetworks iterates over all networks in the database and writes them using the exporter.
func exportNetworks(db *maxminddb.Reader, exp exporter) error {
for result := range db.Networks() {
if err := result.Err(); err != nil {
return fmt.Errorf("failed networks traversal: %w", err)
}
if err := exp.WriteRecord(result); err != nil {
return err
}
}
return exp.Flush()
}
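For reference, here is a minimal sketch of a type satisfying the exporter interface above (the countExporter name and behavior are hypothetical and not part of this change); exportNetworks would drive it exactly as it drives the real CSV/TSV/JSON exporters.

package lib

import (
	"fmt"
	"io"

	"github.com/oschwald/maxminddb-golang/v2"
)

// countExporter is a hypothetical exporter that only counts networks,
// illustrating the minimum needed to satisfy the exporter interface.
type countExporter struct {
	w io.Writer
	n int
}

// WriteRecord is called once per network yielded by db.Networks().
func (e *countExporter) WriteRecord(result maxminddb.Result) error {
	_ = result.Prefix() // the network this record applies to
	e.n++
	return nil
}

// Flush runs once after the traversal completes.
func (e *countExporter) Flush() error {
	_, err := fmt.Fprintf(e.w, "networks: %d\n", e.n)
	return err
}

Wiring it up, under the same assumptions, would then be err := exportNetworks(db, &countExporter{w: os.Stdout}) after opening the database with maxminddb.Open.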
70 changes: 70 additions & 0 deletions lib/exporter_csv.go
@@ -0,0 +1,70 @@
package lib

import (
"encoding/csv"
"fmt"
"io"

"github.com/oschwald/maxminddb-golang/v2"
)

// csvExporter exports records in CSV format.
type csvExporter struct {
wr *csv.Writer
cache map[uintptr]map[string]string
hdrKeys []string
noHdr bool
}

func newCSVExporter(w io.Writer, noHdr bool) *csvExporter {
return &csvExporter{
wr: csv.NewWriter(w),
cache: make(map[uintptr]map[string]string),
noHdr: noHdr,
}
}

func (e *csvExporter) WriteRecord(result maxminddb.Result) error {
offset := result.Offset()
prefix := result.Prefix()

var recordStr map[string]string
if cached, ok := e.cache[offset]; ok {
recordStr = cached
} else {
record := make(map[string]any)
if err := result.Decode(&record); err != nil {
return fmt.Errorf("failed to decode record: %w", err)
}
recordStr = mapInterfaceToStr(record)
e.cache[offset] = recordStr
}

// Write header on first record.
if e.hdrKeys == nil {
e.hdrKeys = sortedMapKeys(recordStr)
if !e.noHdr {
hdr := append([]string{"range"}, e.hdrKeys...)
if err := e.wr.Write(hdr); err != nil {
return fmt.Errorf("failed to write header %v: %w", hdr, err)
}
}
}

// Build values in header key order.
vals := make([]string, len(e.hdrKeys))
for i, k := range e.hdrKeys {
vals[i] = recordStr[k]
}

line := append([]string{prefix.String()}, vals...)
if err := e.wr.Write(line); err != nil {
return fmt.Errorf("failed to write line %v: %w", line, err)
}
return nil
}

func (e *csvExporter) Flush() error {
e.wr.Flush()
return e.wr.Error()
}
56 changes: 56 additions & 0 deletions lib/exporter_json.go
@@ -0,0 +1,56 @@
package lib

import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io"

"github.com/oschwald/maxminddb-golang/v2"
)

const rangePlaceholder = "__RANGE__"

// jsonExporter exports records in JSON Lines format.
type jsonExporter struct {
bw *bufio.Writer
cache map[uintptr][]byte
}

func newJSONExporter(w io.Writer) *jsonExporter {
return &jsonExporter{
bw: bufio.NewWriter(w),
cache: make(map[uintptr][]byte),
}
}

func (e *jsonExporter) WriteRecord(result maxminddb.Result) error {
offset := result.Offset()
prefix := result.Prefix()

cached, ok := e.cache[offset]
if !ok {
record := make(map[string]any)
if err := result.Decode(&record); err != nil {
return fmt.Errorf("failed to decode record: %w", err)
}
record["range"] = rangePlaceholder

encoded, err := json.Marshal(record)
if err != nil {
return fmt.Errorf("failed to encode record: %w", err)
}
cached = encoded
e.cache[offset] = cached
}

line := bytes.Replace(cached, []byte(rangePlaceholder), []byte(prefix.String()), 1)
e.bw.Write(line)
e.bw.WriteByte('\n')
return nil
}

func (e *jsonExporter) Flush() error {
return e.bw.Flush()
}
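The placeholder trick above marshals each distinct record (keyed by its data offset) only once, then splices the per-network prefix into the cached bytes for every network that shares that record. It relies on the assumption that rangePlaceholder never occurs in real record data. A standalone illustration of the splice, with made-up values:

package main

import (
	"bytes"
	"fmt"
)

func main() {
	// Hypothetical cached encoding of one record, marshaled once per data offset.
	cached := []byte(`{"asn":64496,"range":"__RANGE__"}`)

	// Splice the per-network prefix over the placeholder (first occurrence only).
	line := bytes.Replace(cached, []byte("__RANGE__"), []byte("10.0.0.0/8"), 1)

	fmt.Println(string(line)) // {"asn":64496,"range":"10.0.0.0/8"}
}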
69 changes: 69 additions & 0 deletions lib/exporter_tsv.go
@@ -0,0 +1,69 @@
package lib

import (
"fmt"
"io"

"github.com/oschwald/maxminddb-golang/v2"
)

// tsvExporter exports records in TSV format.
type tsvExporter struct {
wr *TsvWriter
cache map[uintptr]map[string]string
hdrKeys []string
noHdr bool
}

func newTSVExporter(w io.Writer, noHdr bool) *tsvExporter {
return &tsvExporter{
wr: NewTsvWriter(w),
cache: make(map[uintptr]map[string]string),
noHdr: noHdr,
}
}

func (e *tsvExporter) WriteRecord(result maxminddb.Result) error {
offset := result.Offset()
prefix := result.Prefix()

var recordStr map[string]string
if cached, ok := e.cache[offset]; ok {
recordStr = cached
} else {
record := make(map[string]any)
if err := result.Decode(&record); err != nil {
return fmt.Errorf("failed to decode record: %w", err)
}
recordStr = mapInterfaceToStr(record)
e.cache[offset] = recordStr
}

// Write header on first record.
if e.hdrKeys == nil {
e.hdrKeys = sortedMapKeys(recordStr)
if !e.noHdr {
hdr := append([]string{"range"}, e.hdrKeys...)
if err := e.wr.Write(hdr); err != nil {
return fmt.Errorf("failed to write header %v: %w", hdr, err)
}
}
}

// Build values in header key order.
vals := make([]string, len(e.hdrKeys))
for i, k := range e.hdrKeys {
vals[i] = recordStr[k]
}

line := append([]string{prefix.String()}, vals...)
if err := e.wr.Write(line); err != nil {
return fmt.Errorf("failed to write line %v: %w", line, err)
}
return nil
}

func (e *tsvExporter) Flush() error {
e.wr.Flush()
return e.wr.Error()
}
14 changes: 9 additions & 5 deletions lib/writer_tsv.go
@@ -1,27 +1,31 @@
package lib

import (
"fmt"
"bufio"
"io"
"strings"
)

type TsvWriter struct {
w io.Writer
bw *bufio.Writer
}

func NewTsvWriter(w io.Writer) *TsvWriter {
return &TsvWriter{
w: w,
bw: bufio.NewWriter(w),
}
}

func (w *TsvWriter) Write(record []string) error {
_, err := fmt.Fprintln(w.w, strings.Join(record, "\t"))
return err
_, err := w.bw.WriteString(strings.Join(record, "\t"))
if err != nil {
return err
}
return w.bw.WriteByte('\n')
}

func (w *TsvWriter) Flush() {
w.bw.Flush()
}

func (w *TsvWriter) Error() error {