From f5f60e911319efe0a8c3d83d404d05170e5ba623 Mon Sep 17 00:00:00 2001 From: Nilushan Costa Date: Thu, 2 Apr 2026 17:18:00 +0530 Subject: [PATCH 1/2] feat: add OpenSearch based logs adapter Signed-off-by: Nilushan Costa --- observability-logs-opensearch/Dockerfile | 24 + observability-logs-opensearch/Makefile | 19 + observability-logs-opensearch/go.mod | 24 + observability-logs-opensearch/go.sum | 66 + observability-logs-opensearch/helm/Chart.yaml | 4 +- .../helm/templates/adapter/configmap.yaml | 17 + .../helm/templates/adapter/deployment.yaml | 61 + .../helm/templates/adapter/service.yaml | 21 + .../helm/values.yaml | 15 + .../internal/api/cfg-client.yaml | 4 + .../internal/api/cfg-models.yaml | 4 + .../internal/api/cfg-server.yaml | 6 + .../internal/api/gen/client.gen.go | 1172 +++++++++++++++++ .../internal/api/gen/models.gen.go | 484 +++++++ .../internal/api/gen/server.gen.go | 980 ++++++++++++++ .../internal/config.go | 100 ++ .../internal/handlers.go | 878 ++++++++++++ .../internal/observer/client.go | 79 ++ .../internal/opensearch/client.go | 357 +++++ .../internal/opensearch/labels.go | 62 + .../internal/opensearch/queries.go | 491 +++++++ .../internal/opensearch/types.go | 309 +++++ .../internal/server.go | 58 + observability-logs-opensearch/main.go | 95 ++ observability-logs-opensearch/module.yaml | 3 + 25 files changed, 5331 insertions(+), 2 deletions(-) create mode 100644 observability-logs-opensearch/Dockerfile create mode 100644 observability-logs-opensearch/Makefile create mode 100644 observability-logs-opensearch/go.mod create mode 100644 observability-logs-opensearch/go.sum create mode 100644 observability-logs-opensearch/helm/templates/adapter/configmap.yaml create mode 100644 observability-logs-opensearch/helm/templates/adapter/deployment.yaml create mode 100644 observability-logs-opensearch/helm/templates/adapter/service.yaml create mode 100644 observability-logs-opensearch/internal/api/cfg-client.yaml create mode 100644 
observability-logs-opensearch/internal/api/cfg-models.yaml create mode 100644 observability-logs-opensearch/internal/api/cfg-server.yaml create mode 100644 observability-logs-opensearch/internal/api/gen/client.gen.go create mode 100644 observability-logs-opensearch/internal/api/gen/models.gen.go create mode 100644 observability-logs-opensearch/internal/api/gen/server.gen.go create mode 100644 observability-logs-opensearch/internal/config.go create mode 100644 observability-logs-opensearch/internal/handlers.go create mode 100644 observability-logs-opensearch/internal/observer/client.go create mode 100644 observability-logs-opensearch/internal/opensearch/client.go create mode 100644 observability-logs-opensearch/internal/opensearch/labels.go create mode 100644 observability-logs-opensearch/internal/opensearch/queries.go create mode 100644 observability-logs-opensearch/internal/opensearch/types.go create mode 100644 observability-logs-opensearch/internal/server.go create mode 100644 observability-logs-opensearch/main.go diff --git a/observability-logs-opensearch/Dockerfile b/observability-logs-opensearch/Dockerfile new file mode 100644 index 0000000..56a013f --- /dev/null +++ b/observability-logs-opensearch/Dockerfile @@ -0,0 +1,24 @@ +# Copyright 2026 The OpenChoreo Authors +# SPDX-License-Identifier: Apache-2.0 + +FROM golang:1.26-alpine AS builder + +WORKDIR /app +COPY go.mod go.sum* ./ +RUN go mod download +COPY . . +RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o main . + +FROM alpine:latest + +RUN apk --no-cache add ca-certificates && \ + addgroup -g 10500 appuser && \ + adduser -D -u 10500 -G appuser appuser + +WORKDIR /home/appuser +COPY --from=builder --chown=appuser:appuser --chmod=0550 /app/main . 
+ +USER appuser +EXPOSE 9098 + +CMD ["./main"] diff --git a/observability-logs-opensearch/Makefile b/observability-logs-opensearch/Makefile new file mode 100644 index 0000000..138ff56 --- /dev/null +++ b/observability-logs-opensearch/Makefile @@ -0,0 +1,19 @@ +CFG_DIR := internal/api +OAPI_CODEGEN_VERSION ?= v2.5.1 +SPEC := https://raw.githubusercontent.com/openchoreo/openchoreo.github.io/refs/heads/main/static/api-specs/observability-logs-adapter-api.yaml + +.PHONY: oapi-codegen-install openapi-codegen unit-test + +oapi-codegen-install: + go install github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen@$(OAPI_CODEGEN_VERSION) + +openapi-codegen: oapi-codegen-install + cd $(CFG_DIR) && $(shell go env GOPATH)/bin/oapi-codegen --config cfg-models.yaml $(SPEC) + cd $(CFG_DIR) && $(shell go env GOPATH)/bin/oapi-codegen --config cfg-server.yaml $(SPEC) + cd $(CFG_DIR) && $(shell go env GOPATH)/bin/oapi-codegen --config cfg-client.yaml $(SPEC) + +MODULE_NAME := $(notdir $(CURDIR)) + +unit-test: + go test -coverprofile=coverage.out ./... 
+ mv coverage.out ../$(MODULE_NAME)-coverage.out diff --git a/observability-logs-opensearch/go.mod b/observability-logs-opensearch/go.mod new file mode 100644 index 0000000..1530846 --- /dev/null +++ b/observability-logs-opensearch/go.mod @@ -0,0 +1,24 @@ +module github.com/openchoreo/community-modules/observability-logs-opensearch + +go 1.26 + +require ( + github.com/getkin/kin-openapi v0.133.0 + github.com/google/uuid v1.5.0 + github.com/oapi-codegen/runtime v1.2.0 + github.com/opensearch-project/opensearch-go/v4 v4.6.0 +) + +require ( + github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect + github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect + github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect + github.com/perimeterx/marshmallow v1.1.5 // indirect + github.com/woodsbury/decimal128 v1.3.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/observability-logs-opensearch/go.sum b/observability-logs-opensearch/go.sum new file mode 100644 index 0000000..1c0a0f8 --- /dev/null +++ b/observability-logs-opensearch/go.sum @@ -0,0 +1,66 @@ +github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk= +github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= +github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk= +github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= +github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= +github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= +github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= +github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/oapi-codegen/runtime v1.2.0 
h1:RvKc1CVS1QeKSNzO97FBQbSMZyQ8s6rZd+LpmzwHMP4= +github.com/oapi-codegen/runtime v1.2.0/go.mod h1:Y7ZhmmlE8ikZOmuHRRndiIm7nf3xcVv+YMweKgG1DT0= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037/go.mod h1:2bpvgLBZEtENV5scfDFEtB/5+1M4hkQhDQrccEJ/qGw= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 h1:bQx3WeLcUWy+RletIKwUIt4x3t8n2SxavmoclizMb8c= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o= +github.com/opensearch-project/opensearch-go/v4 v4.6.0 h1:Ac8aLtDSmLEyOmv0r1qhQLw3b4vcUhE42NE9k+Z4cRc= +github.com/opensearch-project/opensearch-go/v4 v4.6.0/go.mod h1:3iZtb4SNt3IzaxavKq0dURh1AmtVgYW71E4XqmYnIiQ= +github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= +github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= 
+github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= +github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/wI2L/jsondiff v0.7.0 h1:1lH1G37GhBPqCfp/lrs91rf/2j3DktX6qYAKZkLuCQQ= +github.com/wI2L/jsondiff v0.7.0/go.mod h1:KAEIojdQq66oJiHhDyQez2x+sRit0vIzC9KeK0yizxM= +github.com/woodsbury/decimal128 v1.3.0 h1:8pffMNWIlC0O5vbyHWFZAt5yWvWcrHA+3ovIIjVWss0= +github.com/woodsbury/decimal128 v1.3.0/go.mod h1:C5UTmyTjW3JftjUFzOVhC20BEQa2a4ZKOB5I6Zjb+ds= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/observability-logs-opensearch/helm/Chart.yaml b/observability-logs-opensearch/helm/Chart.yaml index 421973e..cef81b5 100644 --- a/observability-logs-opensearch/helm/Chart.yaml +++ b/observability-logs-opensearch/helm/Chart.yaml @@ -5,8 +5,8 @@ apiVersion: v2 name: observability-logs-opensearch description: A Helm chart for OpenChoreo Observability Logs module with Fluent Bit and OpenSearch type: application -version: 0.3.11 -appVersion: "0.3.11" +version: 0.4.0 
+appVersion: "0.4.0" keywords: - opensearch - observability diff --git a/observability-logs-opensearch/helm/templates/adapter/configmap.yaml b/observability-logs-opensearch/helm/templates/adapter/configmap.yaml new file mode 100644 index 0000000..e4f4c06 --- /dev/null +++ b/observability-logs-opensearch/helm/templates/adapter/configmap.yaml @@ -0,0 +1,17 @@ +# Copyright 2026 The OpenChoreo Authors +# SPDX-License-Identifier: Apache-2.0 + +{{- if .Values.adapter.enabled }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: logs-adapter-opensearch + namespace: {{ .Release.Namespace }} + labels: + app: logs-adapter-opensearch +data: + SERVER_PORT: "9098" + OPENSEARCH_ADDRESS: "https://opensearch:9200" + OPENSEARCH_INDEX_PREFIX: "container-logs-" + OBSERVER_URL: {{ .Values.adapter.observerUrl | quote }} +{{- end }} diff --git a/observability-logs-opensearch/helm/templates/adapter/deployment.yaml b/observability-logs-opensearch/helm/templates/adapter/deployment.yaml new file mode 100644 index 0000000..8122b90 --- /dev/null +++ b/observability-logs-opensearch/helm/templates/adapter/deployment.yaml @@ -0,0 +1,61 @@ +# Copyright 2026 The OpenChoreo Authors +# SPDX-License-Identifier: Apache-2.0 + +{{- if .Values.adapter.enabled }} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: logs-adapter-opensearch + namespace: {{ .Release.Namespace }} + labels: + app: logs-adapter-opensearch +spec: + replicas: 1 + selector: + matchLabels: + app: logs-adapter-opensearch + template: + metadata: + annotations: + checksum/config: {{ include (print $.Template.BasePath "/adapter/configmap.yaml") . 
| sha256sum }} + labels: + app: logs-adapter-opensearch + spec: + securityContext: + runAsUser: 10500 + runAsGroup: 10500 + runAsNonRoot: true + containers: + - name: logs-adapter-opensearch + image: "{{ .Values.adapter.image.repository }}:{{ .Values.adapter.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.adapter.image.pullPolicy | default "IfNotPresent" }} + ports: + - containerPort: 9098 + envFrom: + - configMapRef: + name: logs-adapter-opensearch + env: + - name: OPENSEARCH_USERNAME + valueFrom: + secretKeyRef: + name: {{ .Values.adapter.openSearchSecretName }} + key: username + - name: OPENSEARCH_PASSWORD + valueFrom: + secretKeyRef: + name: {{ .Values.adapter.openSearchSecretName }} + key: password + resources: + limits: + cpu: {{ .Values.adapter.resources.limits.cpu }} + memory: {{ .Values.adapter.resources.limits.memory }} + requests: + cpu: {{ .Values.adapter.resources.requests.cpu }} + memory: {{ .Values.adapter.resources.requests.memory }} + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true +{{- end }} diff --git a/observability-logs-opensearch/helm/templates/adapter/service.yaml b/observability-logs-opensearch/helm/templates/adapter/service.yaml new file mode 100644 index 0000000..568137b --- /dev/null +++ b/observability-logs-opensearch/helm/templates/adapter/service.yaml @@ -0,0 +1,21 @@ +# Copyright 2026 The OpenChoreo Authors +# SPDX-License-Identifier: Apache-2.0 + +{{- if .Values.adapter.enabled }} +apiVersion: v1 +kind: Service +metadata: + name: logs-adapter + namespace: {{ .Release.Namespace }} + labels: + app: logs-adapter-opensearch +spec: + type: ClusterIP + ports: + - port: 9098 + targetPort: 9098 + protocol: TCP + name: http + selector: + app: logs-adapter-opensearch +{{- end }} diff --git a/observability-logs-opensearch/helm/values.yaml b/observability-logs-opensearch/helm/values.yaml index b436cab..4ad656d 100644 --- 
a/observability-logs-opensearch/helm/values.yaml +++ b/observability-logs-opensearch/helm/values.yaml @@ -169,6 +169,21 @@ openSearchCluster: - "admin" description: "Admin user" +adapter: + enabled: true + observerUrl: "http://observer-internal.openchoreo-observability-plane:8081" + openSearchSecretName: "" + image: + repository: "ghcr.io/openchoreo/observability-logs-opensearch-adapter" + tag: "" + resources: + limits: + cpu: 100m + memory: 100Mi + requests: + cpu: 50m + memory: 64Mi + openSearchSetup: enabled: true image: diff --git a/observability-logs-opensearch/internal/api/cfg-client.yaml b/observability-logs-opensearch/internal/api/cfg-client.yaml new file mode 100644 index 0000000..e9e29fd --- /dev/null +++ b/observability-logs-opensearch/internal/api/cfg-client.yaml @@ -0,0 +1,4 @@ +package: gen +output: gen/client.gen.go +generate: + client: true diff --git a/observability-logs-opensearch/internal/api/cfg-models.yaml b/observability-logs-opensearch/internal/api/cfg-models.yaml new file mode 100644 index 0000000..cb8418d --- /dev/null +++ b/observability-logs-opensearch/internal/api/cfg-models.yaml @@ -0,0 +1,4 @@ +package: gen +output: gen/models.gen.go +generate: + models: true diff --git a/observability-logs-opensearch/internal/api/cfg-server.yaml b/observability-logs-opensearch/internal/api/cfg-server.yaml new file mode 100644 index 0000000..59acbad --- /dev/null +++ b/observability-logs-opensearch/internal/api/cfg-server.yaml @@ -0,0 +1,6 @@ +package: gen +output: gen/server.gen.go +generate: + std-http-server: true + strict-server: true + embedded-spec: true diff --git a/observability-logs-opensearch/internal/api/gen/client.gen.go b/observability-logs-opensearch/internal/api/gen/client.gen.go new file mode 100644 index 0000000..d85cf7d --- /dev/null +++ b/observability-logs-opensearch/internal/api/gen/client.gen.go @@ -0,0 +1,1172 @@ +// Package gen provides primitives to interact with the openapi HTTP API. 
+// +// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.5.1 DO NOT EDIT. +package gen + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/oapi-codegen/runtime" +) + +// RequestEditorFn is the function signature for the RequestEditor callback function +type RequestEditorFn func(ctx context.Context, req *http.Request) error + +// Doer performs HTTP requests. +// +// The standard http.Client implements this interface. +type HttpRequestDoer interface { + Do(req *http.Request) (*http.Response, error) +} + +// Client which conforms to the OpenAPI3 specification for this service. +type Client struct { + // The endpoint of the server conforming to this interface, with scheme, + // https://api.deepmap.com for example. This can contain a path relative + // to the server, such as https://api.deepmap.com/dev-test, and all the + // paths in the swagger spec will be appended to the server. + Server string + + // Doer for performing requests, typically a *http.Client with any + // customized settings, such as certificate chains. + Client HttpRequestDoer + + // A list of callbacks for modifying requests which are generated before sending over + // the network. 
+ RequestEditors []RequestEditorFn +} + +// ClientOption allows setting custom parameters during construction +type ClientOption func(*Client) error + +// Creates a new Client, with reasonable defaults +func NewClient(server string, opts ...ClientOption) (*Client, error) { + // create a client with sane default values + client := Client{ + Server: server, + } + // mutate client and add all optional params + for _, o := range opts { + if err := o(&client); err != nil { + return nil, err + } + } + // ensure the server URL always has a trailing slash + if !strings.HasSuffix(client.Server, "/") { + client.Server += "/" + } + // create httpClient, if not already present + if client.Client == nil { + client.Client = &http.Client{} + } + return &client, nil +} + +// WithHTTPClient allows overriding the default Doer, which is +// automatically created using http.Client. This is useful for tests. +func WithHTTPClient(doer HttpRequestDoer) ClientOption { + return func(c *Client) error { + c.Client = doer + return nil + } +} + +// WithRequestEditorFn allows setting up a callback function, which will be +// called right before sending the request. This can be used to mutate the request. +func WithRequestEditorFn(fn RequestEditorFn) ClientOption { + return func(c *Client) error { + c.RequestEditors = append(c.RequestEditors, fn) + return nil + } +} + +// The interface specification for the client above. 
+type ClientInterface interface { + // QueryLogsWithBody request with any body + QueryLogsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + QueryLogs(ctx context.Context, body QueryLogsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateAlertRuleWithBody request with any body + CreateAlertRuleWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateAlertRule(ctx context.Context, body CreateAlertRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteAlertRule request + DeleteAlertRule(ctx context.Context, ruleName string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAlertRule request + GetAlertRule(ctx context.Context, ruleName string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateAlertRuleWithBody request with any body + UpdateAlertRuleWithBody(ctx context.Context, ruleName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateAlertRule(ctx context.Context, ruleName string, body UpdateAlertRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // HandleAlertWebhookWithBody request with any body + HandleAlertWebhookWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + HandleAlertWebhook(ctx context.Context, body HandleAlertWebhookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // Health request + Health(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) +} + +func (c *Client) QueryLogsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewQueryLogsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + 
} + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) QueryLogs(ctx context.Context, body QueryLogsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewQueryLogsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAlertRuleWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAlertRuleRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAlertRule(ctx context.Context, body CreateAlertRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAlertRuleRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAlertRule(ctx context.Context, ruleName string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAlertRuleRequest(c.Server, ruleName) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAlertRule(ctx context.Context, ruleName string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAlertRuleRequest(c.Server, ruleName) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, 
reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateAlertRuleWithBody(ctx context.Context, ruleName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateAlertRuleRequestWithBody(c.Server, ruleName, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateAlertRule(ctx context.Context, ruleName string, body UpdateAlertRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateAlertRuleRequest(c.Server, ruleName, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) HandleAlertWebhookWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewHandleAlertWebhookRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) HandleAlertWebhook(ctx context.Context, body HandleAlertWebhookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewHandleAlertWebhookRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) Health(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewHealthRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := 
c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +// NewQueryLogsRequest calls the generic QueryLogs builder with application/json body +func NewQueryLogsRequest(server string, body QueryLogsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewQueryLogsRequestWithBody(server, "application/json", bodyReader) +} + +// NewQueryLogsRequestWithBody generates requests for QueryLogs with any type of body +func NewQueryLogsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1/logs/query") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateAlertRuleRequest calls the generic CreateAlertRule builder with application/json body +func NewCreateAlertRuleRequest(server string, body CreateAlertRuleJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateAlertRuleRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateAlertRuleRequestWithBody generates requests for CreateAlertRule with any type of body +func NewCreateAlertRuleRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := 
fmt.Sprintf("/api/v1alpha1/alerts/rules") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteAlertRuleRequest generates requests for DeleteAlertRule +func NewDeleteAlertRuleRequest(server string, ruleName string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleName", runtime.ParamLocationPath, ruleName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1alpha1/alerts/rules/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetAlertRuleRequest generates requests for GetAlertRule +func NewGetAlertRuleRequest(server string, ruleName string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleName", runtime.ParamLocationPath, ruleName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1alpha1/alerts/rules/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateAlertRuleRequest calls the generic UpdateAlertRule builder with application/json body +func NewUpdateAlertRuleRequest(server string, ruleName string, body UpdateAlertRuleJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateAlertRuleRequestWithBody(server, ruleName, "application/json", bodyReader) +} + +// NewUpdateAlertRuleRequestWithBody generates requests for UpdateAlertRule with any type of body +func NewUpdateAlertRuleRequestWithBody(server string, ruleName string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleName", runtime.ParamLocationPath, ruleName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1alpha1/alerts/rules/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewHandleAlertWebhookRequest calls the generic HandleAlertWebhook builder with application/json body +func NewHandleAlertWebhookRequest(server string, body HandleAlertWebhookJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewHandleAlertWebhookRequestWithBody(server, "application/json", bodyReader) +} + +// NewHandleAlertWebhookRequestWithBody generates requests for HandleAlertWebhook with any type of body +func NewHandleAlertWebhookRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1alpha1/alerts/webhook") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewHealthRequest generates requests for Health +func NewHealthRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/health") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { + for _, r := range c.RequestEditors { + if err := r(ctx, req); err != nil { + return err + } + } + for _, r := range additionalEditors { + if err := r(ctx, req); err != nil { + return err + } + } + return nil +} + +// ClientWithResponses builds on ClientInterface to offer response payloads +type ClientWithResponses struct { + ClientInterface +} + +// NewClientWithResponses creates a new ClientWithResponses, which wraps +// Client with return type handling +func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) { + client, err := NewClient(server, opts...) + if err != nil { + return nil, err + } + return &ClientWithResponses{client}, nil +} + +// WithBaseURL overrides the baseURL. +func WithBaseURL(baseURL string) ClientOption { + return func(c *Client) error { + newBaseURL, err := url.Parse(baseURL) + if err != nil { + return err + } + c.Server = newBaseURL.String() + return nil + } +} + +// ClientWithResponsesInterface is the interface specification for the client with responses above. 
+type ClientWithResponsesInterface interface { + // QueryLogsWithBodyWithResponse request with any body + QueryLogsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*QueryLogsResponse, error) + + QueryLogsWithResponse(ctx context.Context, body QueryLogsJSONRequestBody, reqEditors ...RequestEditorFn) (*QueryLogsResponse, error) + + // CreateAlertRuleWithBodyWithResponse request with any body + CreateAlertRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAlertRuleResponse, error) + + CreateAlertRuleWithResponse(ctx context.Context, body CreateAlertRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAlertRuleResponse, error) + + // DeleteAlertRuleWithResponse request + DeleteAlertRuleWithResponse(ctx context.Context, ruleName string, reqEditors ...RequestEditorFn) (*DeleteAlertRuleResponse, error) + + // GetAlertRuleWithResponse request + GetAlertRuleWithResponse(ctx context.Context, ruleName string, reqEditors ...RequestEditorFn) (*GetAlertRuleResponse, error) + + // UpdateAlertRuleWithBodyWithResponse request with any body + UpdateAlertRuleWithBodyWithResponse(ctx context.Context, ruleName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateAlertRuleResponse, error) + + UpdateAlertRuleWithResponse(ctx context.Context, ruleName string, body UpdateAlertRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateAlertRuleResponse, error) + + // HandleAlertWebhookWithBodyWithResponse request with any body + HandleAlertWebhookWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*HandleAlertWebhookResponse, error) + + HandleAlertWebhookWithResponse(ctx context.Context, body HandleAlertWebhookJSONRequestBody, reqEditors ...RequestEditorFn) (*HandleAlertWebhookResponse, error) + + // HealthWithResponse request + HealthWithResponse(ctx context.Context, 
reqEditors ...RequestEditorFn) (*HealthResponse, error) +} + +type QueryLogsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *LogsQueryResponse + JSON400 *ErrorResponse + JSON401 *ErrorResponse + JSON403 *ErrorResponse + JSON500 *ErrorResponse +} + +// Status returns HTTPResponse.Status +func (r QueryLogsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r QueryLogsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateAlertRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *AlertingRuleSyncResponse + JSON400 *ErrorResponse + JSON409 *ErrorResponse + JSON500 *ErrorResponse +} + +// Status returns HTTPResponse.Status +func (r CreateAlertRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateAlertRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteAlertRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *AlertingRuleSyncResponse + JSON400 *ErrorResponse + JSON404 *ErrorResponse + JSON500 *ErrorResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteAlertRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteAlertRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAlertRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *AlertRuleResponse + JSON400 *ErrorResponse + JSON404 *ErrorResponse + JSON500 *ErrorResponse +} + +// Status returns 
HTTPResponse.Status +func (r GetAlertRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAlertRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateAlertRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *AlertingRuleSyncResponse + JSON400 *ErrorResponse + JSON500 *ErrorResponse +} + +// Status returns HTTPResponse.Status +func (r UpdateAlertRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateAlertRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type HandleAlertWebhookResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *AlertWebhookResponse + JSON400 *ErrorResponse + JSON500 *ErrorResponse +} + +// Status returns HTTPResponse.Status +func (r HandleAlertWebhookResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r HandleAlertWebhookResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type HealthResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Status *string `json:"status,omitempty"` + } + JSON503 *struct { + Error *string `json:"error,omitempty"` + Status *string `json:"status,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r HealthResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r HealthResponse) StatusCode() int { + if r.HTTPResponse != nil { + 
return r.HTTPResponse.StatusCode + } + return 0 +} + +// QueryLogsWithBodyWithResponse request with arbitrary body returning *QueryLogsResponse +func (c *ClientWithResponses) QueryLogsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*QueryLogsResponse, error) { + rsp, err := c.QueryLogsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseQueryLogsResponse(rsp) +} + +func (c *ClientWithResponses) QueryLogsWithResponse(ctx context.Context, body QueryLogsJSONRequestBody, reqEditors ...RequestEditorFn) (*QueryLogsResponse, error) { + rsp, err := c.QueryLogs(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseQueryLogsResponse(rsp) +} + +// CreateAlertRuleWithBodyWithResponse request with arbitrary body returning *CreateAlertRuleResponse +func (c *ClientWithResponses) CreateAlertRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAlertRuleResponse, error) { + rsp, err := c.CreateAlertRuleWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateAlertRuleResponse(rsp) +} + +func (c *ClientWithResponses) CreateAlertRuleWithResponse(ctx context.Context, body CreateAlertRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAlertRuleResponse, error) { + rsp, err := c.CreateAlertRule(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateAlertRuleResponse(rsp) +} + +// DeleteAlertRuleWithResponse request returning *DeleteAlertRuleResponse +func (c *ClientWithResponses) DeleteAlertRuleWithResponse(ctx context.Context, ruleName string, reqEditors ...RequestEditorFn) (*DeleteAlertRuleResponse, error) { + rsp, err := c.DeleteAlertRule(ctx, ruleName, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseDeleteAlertRuleResponse(rsp) +} + +// GetAlertRuleWithResponse request returning *GetAlertRuleResponse +func (c *ClientWithResponses) GetAlertRuleWithResponse(ctx context.Context, ruleName string, reqEditors ...RequestEditorFn) (*GetAlertRuleResponse, error) { + rsp, err := c.GetAlertRule(ctx, ruleName, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetAlertRuleResponse(rsp) +} + +// UpdateAlertRuleWithBodyWithResponse request with arbitrary body returning *UpdateAlertRuleResponse +func (c *ClientWithResponses) UpdateAlertRuleWithBodyWithResponse(ctx context.Context, ruleName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateAlertRuleResponse, error) { + rsp, err := c.UpdateAlertRuleWithBody(ctx, ruleName, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateAlertRuleResponse(rsp) +} + +func (c *ClientWithResponses) UpdateAlertRuleWithResponse(ctx context.Context, ruleName string, body UpdateAlertRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateAlertRuleResponse, error) { + rsp, err := c.UpdateAlertRule(ctx, ruleName, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateAlertRuleResponse(rsp) +} + +// HandleAlertWebhookWithBodyWithResponse request with arbitrary body returning *HandleAlertWebhookResponse +func (c *ClientWithResponses) HandleAlertWebhookWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*HandleAlertWebhookResponse, error) { + rsp, err := c.HandleAlertWebhookWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseHandleAlertWebhookResponse(rsp) +} + +func (c *ClientWithResponses) HandleAlertWebhookWithResponse(ctx context.Context, body HandleAlertWebhookJSONRequestBody, reqEditors ...RequestEditorFn) (*HandleAlertWebhookResponse, error) { + rsp, err := c.HandleAlertWebhook(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseHandleAlertWebhookResponse(rsp) +} + +// HealthWithResponse request returning *HealthResponse +func (c *ClientWithResponses) HealthWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*HealthResponse, error) { + rsp, err := c.Health(ctx, reqEditors...) + if err != nil { + return nil, err + } + return ParseHealthResponse(rsp) +} + +// ParseQueryLogsResponse parses an HTTP response from a QueryLogsWithResponse call +func ParseQueryLogsResponse(rsp *http.Response) (*QueryLogsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &QueryLogsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest LogsQueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } 
+ response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil +} + +// ParseCreateAlertRuleResponse parses an HTTP response from a CreateAlertRuleWithResponse call +func ParseCreateAlertRuleResponse(rsp *http.Response) (*CreateAlertRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CreateAlertRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest AlertingRuleSyncResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil +} + +// ParseDeleteAlertRuleResponse parses an HTTP response from a DeleteAlertRuleWithResponse call +func ParseDeleteAlertRuleResponse(rsp *http.Response) (*DeleteAlertRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DeleteAlertRuleResponse{ + 
Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest AlertingRuleSyncResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil +} + +// ParseGetAlertRuleResponse parses an HTTP response from a GetAlertRuleWithResponse call +func ParseGetAlertRuleResponse(rsp *http.Response) (*GetAlertRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetAlertRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest AlertRuleResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest ErrorResponse + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil +} + +// ParseUpdateAlertRuleResponse parses an HTTP response from a UpdateAlertRuleWithResponse call +func ParseUpdateAlertRuleResponse(rsp *http.Response) (*UpdateAlertRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &UpdateAlertRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest AlertingRuleSyncResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil +} + +// ParseHandleAlertWebhookResponse parses an HTTP response from a HandleAlertWebhookWithResponse call +func ParseHandleAlertWebhookResponse(rsp *http.Response) (*HandleAlertWebhookResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &HandleAlertWebhookResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 
200: + var dest AlertWebhookResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil +} + +// ParseHealthResponse parses an HTTP response from a HealthWithResponse call +func ParseHealthResponse(rsp *http.Response) (*HealthResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &HealthResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Status *string `json:"status,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 503: + var dest struct { + Error *string `json:"error,omitempty"` + Status *string `json:"status,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON503 = &dest + + } + + return response, nil +} diff --git a/observability-logs-opensearch/internal/api/gen/models.gen.go b/observability-logs-opensearch/internal/api/gen/models.gen.go new file mode 100644 index 0000000..26c81de --- /dev/null +++ b/observability-logs-opensearch/internal/api/gen/models.gen.go @@ -0,0 +1,484 @@ +// Package gen provides primitives to interact with the openapi HTTP API. 
+// +// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.5.1 DO NOT EDIT. +package gen + +import ( + "encoding/json" + "time" + + "github.com/oapi-codegen/runtime" + openapi_types "github.com/oapi-codegen/runtime/types" +) + +// Defines values for AlertRuleRequestConditionOperator. +const ( + AlertRuleRequestConditionOperatorEq AlertRuleRequestConditionOperator = "eq" + AlertRuleRequestConditionOperatorGt AlertRuleRequestConditionOperator = "gt" + AlertRuleRequestConditionOperatorGte AlertRuleRequestConditionOperator = "gte" + AlertRuleRequestConditionOperatorLt AlertRuleRequestConditionOperator = "lt" + AlertRuleRequestConditionOperatorLte AlertRuleRequestConditionOperator = "lte" + AlertRuleRequestConditionOperatorNeq AlertRuleRequestConditionOperator = "neq" +) + +// Defines values for AlertRuleResponseConditionOperator. +const ( + AlertRuleResponseConditionOperatorEq AlertRuleResponseConditionOperator = "eq" + AlertRuleResponseConditionOperatorGt AlertRuleResponseConditionOperator = "gt" + AlertRuleResponseConditionOperatorGte AlertRuleResponseConditionOperator = "gte" + AlertRuleResponseConditionOperatorLt AlertRuleResponseConditionOperator = "lt" + AlertRuleResponseConditionOperatorLte AlertRuleResponseConditionOperator = "lte" + AlertRuleResponseConditionOperatorNeq AlertRuleResponseConditionOperator = "neq" +) + +// Defines values for AlertRuleResponseSourceMetric. +const ( + CpuUsage AlertRuleResponseSourceMetric = "cpu_usage" + MemoryUsage AlertRuleResponseSourceMetric = "memory_usage" +) + +// Defines values for AlertWebhookResponseStatus. +const ( + Error AlertWebhookResponseStatus = "error" + Success AlertWebhookResponseStatus = "success" +) + +// Defines values for AlertingRuleSyncResponseAction. 
+const ( + Created AlertingRuleSyncResponseAction = "created" + Deleted AlertingRuleSyncResponseAction = "deleted" + Unchanged AlertingRuleSyncResponseAction = "unchanged" + Updated AlertingRuleSyncResponseAction = "updated" +) + +// Defines values for AlertingRuleSyncResponseStatus. +const ( + Failed AlertingRuleSyncResponseStatus = "failed" + Synced AlertingRuleSyncResponseStatus = "synced" +) + +// Defines values for ErrorResponseTitle. +const ( + BadRequest ErrorResponseTitle = "badRequest" + Conflict ErrorResponseTitle = "conflict" + Forbidden ErrorResponseTitle = "forbidden" + InternalServerError ErrorResponseTitle = "internalServerError" + NotFound ErrorResponseTitle = "notFound" + Unauthorized ErrorResponseTitle = "unauthorized" +) + +// Defines values for LogsQueryRequestLogLevels. +const ( + DEBUG LogsQueryRequestLogLevels = "DEBUG" + ERROR LogsQueryRequestLogLevels = "ERROR" + INFO LogsQueryRequestLogLevels = "INFO" + WARN LogsQueryRequestLogLevels = "WARN" +) + +// Defines values for LogsQueryRequestSortOrder. +const ( + Asc LogsQueryRequestSortOrder = "asc" + Desc LogsQueryRequestSortOrder = "desc" +) + +// AlertRuleRequest defines model for AlertRuleRequest. 
+type AlertRuleRequest struct { + Condition struct { + // Enabled Whether the alert rule is enabled + Enabled bool `json:"enabled"` + + // Interval The interval of time to query for the alert rule + Interval string `json:"interval"` + + // Operator The operator to use for the alert rule + Operator AlertRuleRequestConditionOperator `json:"operator"` + + // Threshold The threshold value to use for the alert rule + Threshold float32 `json:"threshold"` + + // Window The window of time to query for the alert rule + Window string `json:"window"` + } `json:"condition"` + Metadata struct { + // ComponentUid The OpenChoreo component UID to query + ComponentUid openapi_types.UUID `json:"componentUid"` + + // EnvironmentUid The OpenChoreo environment UID to query + EnvironmentUid openapi_types.UUID `json:"environmentUid"` + + // Name The name of the alert rule + Name string `json:"name"` + + // Namespace The namespace of the alert rule CR + Namespace string `json:"namespace"` + + // ProjectUid The OpenChoreo project UID to query + ProjectUid openapi_types.UUID `json:"projectUid"` + } `json:"metadata"` + Source struct { + // Query The query to execute for log based alerts + Query string `json:"query"` + } `json:"source"` +} + +// AlertRuleRequestConditionOperator The operator to use for the alert rule +type AlertRuleRequestConditionOperator string + +// AlertRuleResponse defines model for AlertRuleResponse. 
+type AlertRuleResponse struct { + Condition *struct { + // Enabled Whether the alert rule is enabled + Enabled *bool `json:"enabled,omitempty"` + + // Interval The interval of time to query for the alert rule + Interval *string `json:"interval,omitempty"` + + // Operator The operator to use for the alert rule + Operator *AlertRuleResponseConditionOperator `json:"operator,omitempty"` + + // Threshold The threshold value to use for the alert rule + Threshold *float32 `json:"threshold,omitempty"` + + // Window The window of time to query for the alert rule + Window *string `json:"window,omitempty"` + } `json:"condition,omitempty"` + Metadata *struct { + // ComponentUid The OpenChoreo component UID to query + ComponentUid *openapi_types.UUID `json:"componentUid,omitempty"` + + // EnvironmentUid The OpenChoreo environment UID to query + EnvironmentUid *openapi_types.UUID `json:"environmentUid,omitempty"` + + // Name The name of the alert rule + Name *string `json:"name,omitempty"` + + // Namespace The namespace of the alert rule CR + Namespace *string `json:"namespace,omitempty"` + + // ProjectUid The OpenChoreo project UID to query + ProjectUid *openapi_types.UUID `json:"projectUid,omitempty"` + } `json:"metadata,omitempty"` + Source *struct { + // Metric The metric to query for metric based alerts + Metric *AlertRuleResponseSourceMetric `json:"metric,omitempty"` + + // Query The query to execute for log based alerts + Query *string `json:"query,omitempty"` + } `json:"source,omitempty"` +} + +// AlertRuleResponseConditionOperator The operator to use for the alert rule +type AlertRuleResponseConditionOperator string + +// AlertRuleResponseSourceMetric The metric to query for metric based alerts +type AlertRuleResponseSourceMetric string + +// AlertWebhookResponse defines model for AlertWebhookResponse. 
+type AlertWebhookResponse struct { + // Message The message of the alert webhook + Message *string `json:"message,omitempty"` + + // Status The status of the alert webhook + Status *AlertWebhookResponseStatus `json:"status,omitempty"` +} + +// AlertWebhookResponseStatus The status of the alert webhook +type AlertWebhookResponseStatus string + +// AlertingRuleSyncResponse defines model for AlertingRuleSyncResponse. +type AlertingRuleSyncResponse struct { + // Action The action taken on the alert rule + Action *AlertingRuleSyncResponseAction `json:"action,omitempty"` + + // LastSyncedAt The timestamp of the last sync + LastSyncedAt *string `json:"lastSyncedAt,omitempty"` + + // RuleBackendId The backend ID (UID from observability backend) of the alert rule + RuleBackendId *string `json:"ruleBackendId,omitempty"` + + // RuleLogicalId The logical ID (name) of the alert rule + RuleLogicalId *string `json:"ruleLogicalId,omitempty"` + + // Status The status of the alert rule + Status *AlertingRuleSyncResponseStatus `json:"status,omitempty"` +} + +// AlertingRuleSyncResponseAction The action taken on the alert rule +type AlertingRuleSyncResponseAction string + +// AlertingRuleSyncResponseStatus The status of the alert rule +type AlertingRuleSyncResponseStatus string + +// ComponentLogEntry defines model for ComponentLogEntry. 
+type ComponentLogEntry struct { + // Level The log level + Level *string `json:"level,omitempty"` + + // Log The log message + Log *string `json:"log,omitempty"` + + // Metadata The metadata of the log entry + Metadata *struct { + // ComponentName The OpenChoreo component name that generated the log + ComponentName *string `json:"componentName,omitempty"` + + // ComponentUid The OpenChoreo component UID that generated the log + ComponentUid *openapi_types.UUID `json:"componentUid,omitempty"` + + // ContainerName The container name that generated the log + ContainerName *string `json:"containerName,omitempty"` + + // EnvironmentName The OpenChoreo environment name that generated the log + EnvironmentName *string `json:"environmentName,omitempty"` + + // EnvironmentUid The OpenChoreo environment UID that generated the log + EnvironmentUid *openapi_types.UUID `json:"environmentUid,omitempty"` + + // NamespaceName The OpenChoreo namespace name that generated the log + NamespaceName *string `json:"namespaceName,omitempty"` + + // PodName The Kubernetes pod name that generated the log + PodName *string `json:"podName,omitempty"` + + // PodNamespace The namespace of the Kubernetes pod that generated the log + PodNamespace *string `json:"podNamespace,omitempty"` + + // ProjectName The OpenChoreo project name that generated the log + ProjectName *string `json:"projectName,omitempty"` + + // ProjectUid The OpenChoreo project UID that generated the log + ProjectUid *openapi_types.UUID `json:"projectUid,omitempty"` + } `json:"metadata,omitempty"` + + // Timestamp The timestamp of the log entry + Timestamp *time.Time `json:"timestamp,omitempty"` +} + +// ComponentSearchScope defines model for ComponentSearchScope. 
+type ComponentSearchScope struct { + ComponentUid *string `json:"componentUid,omitempty"` + EnvironmentUid *string `json:"environmentUid,omitempty"` + Namespace string `json:"namespace"` + ProjectUid *string `json:"projectUid,omitempty"` +} + +// ErrorResponse defines model for ErrorResponse. +type ErrorResponse struct { + // ErrorCode The error code from observer service + ErrorCode *string `json:"errorCode,omitempty"` + + // Message Human-readable error message + Message *string `json:"message,omitempty"` + + // Title The error message + Title *ErrorResponseTitle `json:"title,omitempty"` +} + +// ErrorResponseTitle The error message +type ErrorResponseTitle string + +// LogsQueryRequest defines model for LogsQueryRequest. +type LogsQueryRequest struct { + // EndTime The end time of the query + EndTime time.Time `json:"endTime"` + + // Limit The maximum number of items to return + Limit *int `json:"limit,omitempty"` + LogLevels *[]LogsQueryRequestLogLevels `json:"logLevels,omitempty"` + SearchPhrase *string `json:"searchPhrase,omitempty"` + SearchScope LogsQueryRequest_SearchScope `json:"searchScope"` + + // SortOrder The sort order of the query + SortOrder *LogsQueryRequestSortOrder `json:"sortOrder,omitempty"` + + // StartTime The start time of the query + StartTime time.Time `json:"startTime"` +} + +// LogsQueryRequestLogLevels defines model for LogsQueryRequest.LogLevels. +type LogsQueryRequestLogLevels string + +// LogsQueryRequest_SearchScope defines model for LogsQueryRequest.SearchScope. +type LogsQueryRequest_SearchScope struct { + union json.RawMessage +} + +// LogsQueryRequestSortOrder The sort order of the query +type LogsQueryRequestSortOrder string + +// LogsQueryResponse defines model for LogsQueryResponse. 
+type LogsQueryResponse struct { + // Logs The logs queried successfully + Logs *LogsQueryResponse_Logs `json:"logs,omitempty"` + + // TookMs The time taken to query the logs in milliseconds + TookMs *int `json:"tookMs,omitempty"` + + // Total The total number of matching log entries, capped at 1000 + Total *int `json:"total,omitempty"` +} + +// LogsQueryResponseLogs0 defines model for . +type LogsQueryResponseLogs0 = []ComponentLogEntry + +// LogsQueryResponseLogs1 defines model for . +type LogsQueryResponseLogs1 = []WorkflowLogEntry + +// LogsQueryResponse_Logs The logs queried successfully +type LogsQueryResponse_Logs struct { + union json.RawMessage +} + +// WorkflowLogEntry defines model for WorkflowLogEntry. +type WorkflowLogEntry struct { + // Log The log message + Log *string `json:"log,omitempty"` + + // Timestamp The timestamp of the log entry + Timestamp *time.Time `json:"timestamp,omitempty"` +} + +// WorkflowSearchScope defines model for WorkflowSearchScope. +type WorkflowSearchScope struct { + Namespace string `json:"namespace"` + WorkflowRunName *string `json:"workflowRunName,omitempty"` +} + +// HandleAlertWebhookJSONBody defines parameters for HandleAlertWebhook. +type HandleAlertWebhookJSONBody = map[string]interface{} + +// QueryLogsJSONRequestBody defines body for QueryLogs for application/json ContentType. +type QueryLogsJSONRequestBody = LogsQueryRequest + +// CreateAlertRuleJSONRequestBody defines body for CreateAlertRule for application/json ContentType. +type CreateAlertRuleJSONRequestBody = AlertRuleRequest + +// UpdateAlertRuleJSONRequestBody defines body for UpdateAlertRule for application/json ContentType. +type UpdateAlertRuleJSONRequestBody = AlertRuleRequest + +// HandleAlertWebhookJSONRequestBody defines body for HandleAlertWebhook for application/json ContentType. 
+type HandleAlertWebhookJSONRequestBody = HandleAlertWebhookJSONBody + +// AsComponentSearchScope returns the union data inside the LogsQueryRequest_SearchScope as a ComponentSearchScope +func (t LogsQueryRequest_SearchScope) AsComponentSearchScope() (ComponentSearchScope, error) { + var body ComponentSearchScope + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromComponentSearchScope overwrites any union data inside the LogsQueryRequest_SearchScope as the provided ComponentSearchScope +func (t *LogsQueryRequest_SearchScope) FromComponentSearchScope(v ComponentSearchScope) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeComponentSearchScope performs a merge with any union data inside the LogsQueryRequest_SearchScope, using the provided ComponentSearchScope +func (t *LogsQueryRequest_SearchScope) MergeComponentSearchScope(v ComponentSearchScope) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsWorkflowSearchScope returns the union data inside the LogsQueryRequest_SearchScope as a WorkflowSearchScope +func (t LogsQueryRequest_SearchScope) AsWorkflowSearchScope() (WorkflowSearchScope, error) { + var body WorkflowSearchScope + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromWorkflowSearchScope overwrites any union data inside the LogsQueryRequest_SearchScope as the provided WorkflowSearchScope +func (t *LogsQueryRequest_SearchScope) FromWorkflowSearchScope(v WorkflowSearchScope) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeWorkflowSearchScope performs a merge with any union data inside the LogsQueryRequest_SearchScope, using the provided WorkflowSearchScope +func (t *LogsQueryRequest_SearchScope) MergeWorkflowSearchScope(v WorkflowSearchScope) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := 
runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t LogsQueryRequest_SearchScope) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *LogsQueryRequest_SearchScope) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsLogsQueryResponseLogs0 returns the union data inside the LogsQueryResponse_Logs as a LogsQueryResponseLogs0 +func (t LogsQueryResponse_Logs) AsLogsQueryResponseLogs0() (LogsQueryResponseLogs0, error) { + var body LogsQueryResponseLogs0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromLogsQueryResponseLogs0 overwrites any union data inside the LogsQueryResponse_Logs as the provided LogsQueryResponseLogs0 +func (t *LogsQueryResponse_Logs) FromLogsQueryResponseLogs0(v LogsQueryResponseLogs0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeLogsQueryResponseLogs0 performs a merge with any union data inside the LogsQueryResponse_Logs, using the provided LogsQueryResponseLogs0 +func (t *LogsQueryResponse_Logs) MergeLogsQueryResponseLogs0(v LogsQueryResponseLogs0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsLogsQueryResponseLogs1 returns the union data inside the LogsQueryResponse_Logs as a LogsQueryResponseLogs1 +func (t LogsQueryResponse_Logs) AsLogsQueryResponseLogs1() (LogsQueryResponseLogs1, error) { + var body LogsQueryResponseLogs1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromLogsQueryResponseLogs1 overwrites any union data inside the LogsQueryResponse_Logs as the provided LogsQueryResponseLogs1 +func (t *LogsQueryResponse_Logs) FromLogsQueryResponseLogs1(v LogsQueryResponseLogs1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeLogsQueryResponseLogs1 performs a merge with any union data inside the LogsQueryResponse_Logs, using 
the provided LogsQueryResponseLogs1 +func (t *LogsQueryResponse_Logs) MergeLogsQueryResponseLogs1(v LogsQueryResponseLogs1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t LogsQueryResponse_Logs) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *LogsQueryResponse_Logs) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} diff --git a/observability-logs-opensearch/internal/api/gen/server.gen.go b/observability-logs-opensearch/internal/api/gen/server.gen.go new file mode 100644 index 0000000..08e5efa --- /dev/null +++ b/observability-logs-opensearch/internal/api/gen/server.gen.go @@ -0,0 +1,980 @@ +//go:build go1.22 + +// Package gen provides primitives to interact with the openapi HTTP API. +// +// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.5.1 DO NOT EDIT. +package gen + +import ( + "bytes" + "compress/gzip" + "context" + "encoding/base64" + "encoding/json" + "fmt" + "net/http" + "net/url" + "path" + "strings" + + "github.com/getkin/kin-openapi/openapi3" + "github.com/oapi-codegen/runtime" + strictnethttp "github.com/oapi-codegen/runtime/strictmiddleware/nethttp" +) + +// ServerInterface represents all server handlers. 
// NOTE(review): generated by oapi-codegen — do not hand-edit; routing changes
// belong in the OpenAPI spec. ServerInterface represents all server handlers.
type ServerInterface interface {
	// Query logs
	// (POST /api/v1/logs/query)
	QueryLogs(w http.ResponseWriter, r *http.Request)
	// Create alert rule
	// (POST /api/v1alpha1/alerts/rules)
	CreateAlertRule(w http.ResponseWriter, r *http.Request)
	// Delete alert rule
	// (DELETE /api/v1alpha1/alerts/rules/{ruleName})
	DeleteAlertRule(w http.ResponseWriter, r *http.Request, ruleName string)
	// Get alert rule
	// (GET /api/v1alpha1/alerts/rules/{ruleName})
	GetAlertRule(w http.ResponseWriter, r *http.Request, ruleName string)
	// Update alert rule
	// (PUT /api/v1alpha1/alerts/rules/{ruleName})
	UpdateAlertRule(w http.ResponseWriter, r *http.Request, ruleName string)
	// Handles triggered alerts from the alerting backend
	// (POST /api/v1alpha1/alerts/webhook)
	HandleAlertWebhook(w http.ResponseWriter, r *http.Request)
	// Health check
	// (GET /health)
	Health(w http.ResponseWriter, r *http.Request)
}

// ServerInterfaceWrapper converts contexts to parameters.
type ServerInterfaceWrapper struct {
	Handler            ServerInterface
	HandlerMiddlewares []MiddlewareFunc
	ErrorHandlerFunc   func(w http.ResponseWriter, r *http.Request, err error)
}

type MiddlewareFunc func(http.Handler) http.Handler

// QueryLogs operation middleware
func (siw *ServerInterfaceWrapper) QueryLogs(w http.ResponseWriter, r *http.Request) {

	handler := http.Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		siw.Handler.QueryLogs(w, r)
	}))

	// Middlewares wrap outermost-last: the last entry in HandlerMiddlewares
	// runs first at request time.
	for _, middleware := range siw.HandlerMiddlewares {
		handler = middleware(handler)
	}

	handler.ServeHTTP(w, r)
}

// CreateAlertRule operation middleware
func (siw *ServerInterfaceWrapper) CreateAlertRule(w http.ResponseWriter, r *http.Request) {

	handler := http.Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		siw.Handler.CreateAlertRule(w, r)
	}))

	for _, middleware := range siw.HandlerMiddlewares {
		handler = middleware(handler)
	}

	handler.ServeHTTP(w, r)
}

// DeleteAlertRule operation middleware
func (siw *ServerInterfaceWrapper) DeleteAlertRule(w http.ResponseWriter, r *http.Request) {

	var err error

	// ------------- Path parameter "ruleName" -------------
	var ruleName string

	err = runtime.BindStyledParameterWithOptions("simple", "ruleName", r.PathValue("ruleName"), &ruleName, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true})
	if err != nil {
		siw.ErrorHandlerFunc(w, r, &InvalidParamFormatError{ParamName: "ruleName", Err: err})
		return
	}

	handler := http.Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		siw.Handler.DeleteAlertRule(w, r, ruleName)
	}))

	for _, middleware := range siw.HandlerMiddlewares {
		handler = middleware(handler)
	}

	handler.ServeHTTP(w, r)
}

// GetAlertRule operation middleware
func (siw *ServerInterfaceWrapper) GetAlertRule(w http.ResponseWriter, r *http.Request) {

	var err error

	// ------------- Path parameter "ruleName" -------------
	var ruleName string

	err = runtime.BindStyledParameterWithOptions("simple", "ruleName", r.PathValue("ruleName"), &ruleName, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true})
	if err != nil {
		siw.ErrorHandlerFunc(w, r, &InvalidParamFormatError{ParamName: "ruleName", Err: err})
		return
	}

	handler := http.Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		siw.Handler.GetAlertRule(w, r, ruleName)
	}))

	for _, middleware := range siw.HandlerMiddlewares {
		handler = middleware(handler)
	}

	handler.ServeHTTP(w, r)
}

// UpdateAlertRule operation middleware
func (siw *ServerInterfaceWrapper) UpdateAlertRule(w http.ResponseWriter, r *http.Request) {

	var err error

	// ------------- Path parameter "ruleName" -------------
	var ruleName string

	err = runtime.BindStyledParameterWithOptions("simple", "ruleName", r.PathValue("ruleName"), &ruleName, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true})
	if err != nil {
		siw.ErrorHandlerFunc(w, r, &InvalidParamFormatError{ParamName: "ruleName", Err: err})
		return
	}

	handler := http.Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		siw.Handler.UpdateAlertRule(w, r, ruleName)
	}))

	for _, middleware := range siw.HandlerMiddlewares {
		handler = middleware(handler)
	}

	handler.ServeHTTP(w, r)
}

// HandleAlertWebhook operation middleware
func (siw *ServerInterfaceWrapper) HandleAlertWebhook(w http.ResponseWriter, r *http.Request) {

	handler := http.Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		siw.Handler.HandleAlertWebhook(w, r)
	}))

	for _, middleware := range siw.HandlerMiddlewares {
		handler = middleware(handler)
	}

	handler.ServeHTTP(w, r)
}

// Health operation middleware
func (siw *ServerInterfaceWrapper) Health(w http.ResponseWriter, r *http.Request) {

	handler := http.Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		siw.Handler.Health(w, r)
	}))

	for _, middleware := range siw.HandlerMiddlewares {
		handler = middleware(handler)
	}

	handler.ServeHTTP(w, r)
}

type UnescapedCookieParamError struct {
	ParamName string
	Err       error
}

func (e *UnescapedCookieParamError) Error() string {
	return fmt.Sprintf("error unescaping cookie parameter '%s'", e.ParamName)
}

func (e *UnescapedCookieParamError) Unwrap() error {
	return e.Err
}

type UnmarshalingParamError struct {
	ParamName string
	Err       error
}

func (e *UnmarshalingParamError) Error() string {
	return fmt.Sprintf("Error unmarshaling parameter %s as JSON: %s", e.ParamName, e.Err.Error())
}

func (e *UnmarshalingParamError) Unwrap() error {
	return e.Err
}

type RequiredParamError struct {
	ParamName string
}

func (e *RequiredParamError) Error() string {
	return fmt.Sprintf("Query argument %s is required, but not found", e.ParamName)
}

type RequiredHeaderError struct {
	ParamName string
	Err       error
}

func (e *RequiredHeaderError) Error() string {
	return fmt.Sprintf("Header parameter %s is required, but not found", e.ParamName)
}

func (e *RequiredHeaderError) Unwrap() error {
	return e.Err
}

type InvalidParamFormatError struct {
	ParamName string
	Err       error
}

func (e *InvalidParamFormatError) Error() string {
	return fmt.Sprintf("Invalid format for parameter %s: %s", e.ParamName, e.Err.Error())
}

func (e *InvalidParamFormatError) Unwrap() error {
	return e.Err
}

type TooManyValuesForParamError struct {
	ParamName string
	Count     int
}

func (e *TooManyValuesForParamError) Error() string {
	return fmt.Sprintf("Expected one value for %s, got %d", e.ParamName, e.Count)
}

// Handler creates http.Handler with routing matching OpenAPI spec.
func Handler(si ServerInterface) http.Handler {
	return HandlerWithOptions(si, StdHTTPServerOptions{})
}

// ServeMux is an abstraction of http.ServeMux.
type ServeMux interface {
	HandleFunc(pattern string, handler func(http.ResponseWriter, *http.Request))
	ServeHTTP(w http.ResponseWriter, r *http.Request)
}

type StdHTTPServerOptions struct {
	BaseURL          string
	BaseRouter       ServeMux
	Middlewares      []MiddlewareFunc
	ErrorHandlerFunc func(w http.ResponseWriter, r *http.Request, err error)
}

// HandlerFromMux creates http.Handler with routing matching OpenAPI spec based on the provided mux.
// NOTE(review): generated by oapi-codegen — do not hand-edit. The "METHOD path"
// HandleFunc patterns below require Go 1.22+ ServeMux (the file's build tag
// enforces this).
func HandlerFromMux(si ServerInterface, m ServeMux) http.Handler {
	return HandlerWithOptions(si, StdHTTPServerOptions{
		BaseRouter: m,
	})
}

func HandlerFromMuxWithBaseURL(si ServerInterface, m ServeMux, baseURL string) http.Handler {
	return HandlerWithOptions(si, StdHTTPServerOptions{
		BaseURL:    baseURL,
		BaseRouter: m,
	})
}

// HandlerWithOptions creates http.Handler with additional options
func HandlerWithOptions(si ServerInterface, options StdHTTPServerOptions) http.Handler {
	m := options.BaseRouter

	if m == nil {
		m = http.NewServeMux()
	}
	if options.ErrorHandlerFunc == nil {
		options.ErrorHandlerFunc = func(w http.ResponseWriter, r *http.Request, err error) {
			http.Error(w, err.Error(), http.StatusBadRequest)
		}
	}

	wrapper := ServerInterfaceWrapper{
		Handler:            si,
		HandlerMiddlewares: options.Middlewares,
		ErrorHandlerFunc:   options.ErrorHandlerFunc,
	}

	m.HandleFunc("POST "+options.BaseURL+"/api/v1/logs/query", wrapper.QueryLogs)
	m.HandleFunc("POST "+options.BaseURL+"/api/v1alpha1/alerts/rules", wrapper.CreateAlertRule)
	m.HandleFunc("DELETE "+options.BaseURL+"/api/v1alpha1/alerts/rules/{ruleName}", wrapper.DeleteAlertRule)
	m.HandleFunc("GET "+options.BaseURL+"/api/v1alpha1/alerts/rules/{ruleName}", wrapper.GetAlertRule)
	m.HandleFunc("PUT "+options.BaseURL+"/api/v1alpha1/alerts/rules/{ruleName}", wrapper.UpdateAlertRule)
	m.HandleFunc("POST "+options.BaseURL+"/api/v1alpha1/alerts/webhook", wrapper.HandleAlertWebhook)
	m.HandleFunc("GET "+options.BaseURL+"/health", wrapper.Health)

	return m
}

type QueryLogsRequestObject struct {
	Body *QueryLogsJSONRequestBody
}

type QueryLogsResponseObject interface {
	VisitQueryLogsResponse(w http.ResponseWriter) error
}

type QueryLogs200JSONResponse LogsQueryResponse

func (response QueryLogs200JSONResponse) VisitQueryLogsResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(200)

	return json.NewEncoder(w).Encode(response)
}

type QueryLogs400JSONResponse ErrorResponse

func (response QueryLogs400JSONResponse) VisitQueryLogsResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(400)

	return json.NewEncoder(w).Encode(response)
}

type QueryLogs401JSONResponse ErrorResponse

func (response QueryLogs401JSONResponse) VisitQueryLogsResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(401)

	return json.NewEncoder(w).Encode(response)
}

type QueryLogs403JSONResponse ErrorResponse

func (response QueryLogs403JSONResponse) VisitQueryLogsResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(403)

	return json.NewEncoder(w).Encode(response)
}

type QueryLogs500JSONResponse ErrorResponse

func (response QueryLogs500JSONResponse) VisitQueryLogsResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(500)

	return json.NewEncoder(w).Encode(response)
}

type CreateAlertRuleRequestObject struct {
	Body *CreateAlertRuleJSONRequestBody
}

type CreateAlertRuleResponseObject interface {
	VisitCreateAlertRuleResponse(w http.ResponseWriter) error
}

type CreateAlertRule201JSONResponse AlertingRuleSyncResponse

func (response CreateAlertRule201JSONResponse) VisitCreateAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(201)

	return json.NewEncoder(w).Encode(response)
}

type CreateAlertRule400JSONResponse ErrorResponse

func (response CreateAlertRule400JSONResponse) VisitCreateAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(400)

	return json.NewEncoder(w).Encode(response)
}

type CreateAlertRule409JSONResponse ErrorResponse

func (response CreateAlertRule409JSONResponse) VisitCreateAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(409)

	return json.NewEncoder(w).Encode(response)
}

type CreateAlertRule500JSONResponse ErrorResponse

func (response CreateAlertRule500JSONResponse) VisitCreateAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(500)

	return json.NewEncoder(w).Encode(response)
}

type DeleteAlertRuleRequestObject struct {
	RuleName string `json:"ruleName"`
}

type DeleteAlertRuleResponseObject interface {
	VisitDeleteAlertRuleResponse(w http.ResponseWriter) error
}

type DeleteAlertRule200JSONResponse AlertingRuleSyncResponse

func (response DeleteAlertRule200JSONResponse) VisitDeleteAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(200)

	return json.NewEncoder(w).Encode(response)
}

type DeleteAlertRule400JSONResponse ErrorResponse

func (response DeleteAlertRule400JSONResponse) VisitDeleteAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(400)

	return json.NewEncoder(w).Encode(response)
}

type DeleteAlertRule404JSONResponse ErrorResponse

func (response DeleteAlertRule404JSONResponse) VisitDeleteAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(404)

	return json.NewEncoder(w).Encode(response)
}

type DeleteAlertRule500JSONResponse ErrorResponse

func (response DeleteAlertRule500JSONResponse) VisitDeleteAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(500)

	return json.NewEncoder(w).Encode(response)
}

type GetAlertRuleRequestObject struct {
	RuleName string `json:"ruleName"`
}

type GetAlertRuleResponseObject interface {
	VisitGetAlertRuleResponse(w http.ResponseWriter) error
}

type GetAlertRule200JSONResponse AlertRuleResponse

func (response GetAlertRule200JSONResponse) VisitGetAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(200)

	return json.NewEncoder(w).Encode(response)
}

type GetAlertRule400JSONResponse ErrorResponse

func (response GetAlertRule400JSONResponse) VisitGetAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(400)

	return json.NewEncoder(w).Encode(response)
}

type GetAlertRule404JSONResponse ErrorResponse

func (response GetAlertRule404JSONResponse) VisitGetAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(404)

	return json.NewEncoder(w).Encode(response)
}

type GetAlertRule500JSONResponse ErrorResponse

func (response GetAlertRule500JSONResponse) VisitGetAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(500)

	return json.NewEncoder(w).Encode(response)
}

type UpdateAlertRuleRequestObject struct {
	RuleName string `json:"ruleName"`
	Body     *UpdateAlertRuleJSONRequestBody
}

type UpdateAlertRuleResponseObject interface {
	VisitUpdateAlertRuleResponse(w http.ResponseWriter) error
}

type UpdateAlertRule200JSONResponse AlertingRuleSyncResponse

func (response UpdateAlertRule200JSONResponse) VisitUpdateAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(200)

	return json.NewEncoder(w).Encode(response)
}

type UpdateAlertRule400JSONResponse ErrorResponse

func (response UpdateAlertRule400JSONResponse) VisitUpdateAlertRuleResponse(w http.ResponseWriter) error {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(400)

	return json.NewEncoder(w).Encode(response)
}
+type UpdateAlertRule500JSONResponse ErrorResponse + +func (response UpdateAlertRule500JSONResponse) VisitUpdateAlertRuleResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(500) + + return json.NewEncoder(w).Encode(response) +} + +type HandleAlertWebhookRequestObject struct { + Body *HandleAlertWebhookJSONRequestBody +} + +type HandleAlertWebhookResponseObject interface { + VisitHandleAlertWebhookResponse(w http.ResponseWriter) error +} + +type HandleAlertWebhook200JSONResponse AlertWebhookResponse + +func (response HandleAlertWebhook200JSONResponse) VisitHandleAlertWebhookResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type HandleAlertWebhook400JSONResponse ErrorResponse + +func (response HandleAlertWebhook400JSONResponse) VisitHandleAlertWebhookResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(400) + + return json.NewEncoder(w).Encode(response) +} + +type HandleAlertWebhook500JSONResponse ErrorResponse + +func (response HandleAlertWebhook500JSONResponse) VisitHandleAlertWebhookResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(500) + + return json.NewEncoder(w).Encode(response) +} + +type HealthRequestObject struct { +} + +type HealthResponseObject interface { + VisitHealthResponse(w http.ResponseWriter) error +} + +type Health200JSONResponse struct { + Status *string `json:"status,omitempty"` +} + +func (response Health200JSONResponse) VisitHealthResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type Health503JSONResponse struct { + Error *string `json:"error,omitempty"` + Status *string `json:"status,omitempty"` +} + +func (response Health503JSONResponse) 
VisitHealthResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(503) + + return json.NewEncoder(w).Encode(response) +} + +// StrictServerInterface represents all server handlers. +type StrictServerInterface interface { + // Query logs + // (POST /api/v1/logs/query) + QueryLogs(ctx context.Context, request QueryLogsRequestObject) (QueryLogsResponseObject, error) + // Create alert rule + // (POST /api/v1alpha1/alerts/rules) + CreateAlertRule(ctx context.Context, request CreateAlertRuleRequestObject) (CreateAlertRuleResponseObject, error) + // Delete alert rule + // (DELETE /api/v1alpha1/alerts/rules/{ruleName}) + DeleteAlertRule(ctx context.Context, request DeleteAlertRuleRequestObject) (DeleteAlertRuleResponseObject, error) + // Get alert rule + // (GET /api/v1alpha1/alerts/rules/{ruleName}) + GetAlertRule(ctx context.Context, request GetAlertRuleRequestObject) (GetAlertRuleResponseObject, error) + // Update alert rule + // (PUT /api/v1alpha1/alerts/rules/{ruleName}) + UpdateAlertRule(ctx context.Context, request UpdateAlertRuleRequestObject) (UpdateAlertRuleResponseObject, error) + // Handles triggered alerts from the alerting backend + // (POST /api/v1alpha1/alerts/webhook) + HandleAlertWebhook(ctx context.Context, request HandleAlertWebhookRequestObject) (HandleAlertWebhookResponseObject, error) + // Health check + // (GET /health) + Health(ctx context.Context, request HealthRequestObject) (HealthResponseObject, error) +} + +type StrictHandlerFunc = strictnethttp.StrictHTTPHandlerFunc +type StrictMiddlewareFunc = strictnethttp.StrictHTTPMiddlewareFunc + +type StrictHTTPServerOptions struct { + RequestErrorHandlerFunc func(w http.ResponseWriter, r *http.Request, err error) + ResponseErrorHandlerFunc func(w http.ResponseWriter, r *http.Request, err error) +} + +func NewStrictHandler(ssi StrictServerInterface, middlewares []StrictMiddlewareFunc) ServerInterface { + return &strictHandler{ssi: ssi, middlewares: 
middlewares, options: StrictHTTPServerOptions{ + RequestErrorHandlerFunc: func(w http.ResponseWriter, r *http.Request, err error) { + http.Error(w, err.Error(), http.StatusBadRequest) + }, + ResponseErrorHandlerFunc: func(w http.ResponseWriter, r *http.Request, err error) { + http.Error(w, err.Error(), http.StatusInternalServerError) + }, + }} +} + +func NewStrictHandlerWithOptions(ssi StrictServerInterface, middlewares []StrictMiddlewareFunc, options StrictHTTPServerOptions) ServerInterface { + return &strictHandler{ssi: ssi, middlewares: middlewares, options: options} +} + +type strictHandler struct { + ssi StrictServerInterface + middlewares []StrictMiddlewareFunc + options StrictHTTPServerOptions +} + +// QueryLogs operation middleware +func (sh *strictHandler) QueryLogs(w http.ResponseWriter, r *http.Request) { + var request QueryLogsRequestObject + + var body QueryLogsJSONRequestBody + if err := json.NewDecoder(r.Body).Decode(&body); err != nil { + sh.options.RequestErrorHandlerFunc(w, r, fmt.Errorf("can't decode JSON body: %w", err)) + return + } + request.Body = &body + + handler := func(ctx context.Context, w http.ResponseWriter, r *http.Request, request interface{}) (interface{}, error) { + return sh.ssi.QueryLogs(ctx, request.(QueryLogsRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "QueryLogs") + } + + response, err := handler(r.Context(), w, r, request) + + if err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } else if validResponse, ok := response.(QueryLogsResponseObject); ok { + if err := validResponse.VisitQueryLogsResponse(w); err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } + } else if response != nil { + sh.options.ResponseErrorHandlerFunc(w, r, fmt.Errorf("unexpected response type: %T", response)) + } +} + +// CreateAlertRule operation middleware +func (sh *strictHandler) CreateAlertRule(w http.ResponseWriter, r *http.Request) { + var request 
CreateAlertRuleRequestObject + + var body CreateAlertRuleJSONRequestBody + if err := json.NewDecoder(r.Body).Decode(&body); err != nil { + sh.options.RequestErrorHandlerFunc(w, r, fmt.Errorf("can't decode JSON body: %w", err)) + return + } + request.Body = &body + + handler := func(ctx context.Context, w http.ResponseWriter, r *http.Request, request interface{}) (interface{}, error) { + return sh.ssi.CreateAlertRule(ctx, request.(CreateAlertRuleRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "CreateAlertRule") + } + + response, err := handler(r.Context(), w, r, request) + + if err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } else if validResponse, ok := response.(CreateAlertRuleResponseObject); ok { + if err := validResponse.VisitCreateAlertRuleResponse(w); err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } + } else if response != nil { + sh.options.ResponseErrorHandlerFunc(w, r, fmt.Errorf("unexpected response type: %T", response)) + } +} + +// DeleteAlertRule operation middleware +func (sh *strictHandler) DeleteAlertRule(w http.ResponseWriter, r *http.Request, ruleName string) { + var request DeleteAlertRuleRequestObject + + request.RuleName = ruleName + + handler := func(ctx context.Context, w http.ResponseWriter, r *http.Request, request interface{}) (interface{}, error) { + return sh.ssi.DeleteAlertRule(ctx, request.(DeleteAlertRuleRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "DeleteAlertRule") + } + + response, err := handler(r.Context(), w, r, request) + + if err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } else if validResponse, ok := response.(DeleteAlertRuleResponseObject); ok { + if err := validResponse.VisitDeleteAlertRuleResponse(w); err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } + } else if response != nil { + sh.options.ResponseErrorHandlerFunc(w, r, fmt.Errorf("unexpected response 
type: %T", response)) + } +} + +// GetAlertRule operation middleware +func (sh *strictHandler) GetAlertRule(w http.ResponseWriter, r *http.Request, ruleName string) { + var request GetAlertRuleRequestObject + + request.RuleName = ruleName + + handler := func(ctx context.Context, w http.ResponseWriter, r *http.Request, request interface{}) (interface{}, error) { + return sh.ssi.GetAlertRule(ctx, request.(GetAlertRuleRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "GetAlertRule") + } + + response, err := handler(r.Context(), w, r, request) + + if err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } else if validResponse, ok := response.(GetAlertRuleResponseObject); ok { + if err := validResponse.VisitGetAlertRuleResponse(w); err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } + } else if response != nil { + sh.options.ResponseErrorHandlerFunc(w, r, fmt.Errorf("unexpected response type: %T", response)) + } +} + +// UpdateAlertRule operation middleware +func (sh *strictHandler) UpdateAlertRule(w http.ResponseWriter, r *http.Request, ruleName string) { + var request UpdateAlertRuleRequestObject + + request.RuleName = ruleName + + var body UpdateAlertRuleJSONRequestBody + if err := json.NewDecoder(r.Body).Decode(&body); err != nil { + sh.options.RequestErrorHandlerFunc(w, r, fmt.Errorf("can't decode JSON body: %w", err)) + return + } + request.Body = &body + + handler := func(ctx context.Context, w http.ResponseWriter, r *http.Request, request interface{}) (interface{}, error) { + return sh.ssi.UpdateAlertRule(ctx, request.(UpdateAlertRuleRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "UpdateAlertRule") + } + + response, err := handler(r.Context(), w, r, request) + + if err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } else if validResponse, ok := response.(UpdateAlertRuleResponseObject); ok { + if err := 
validResponse.VisitUpdateAlertRuleResponse(w); err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } + } else if response != nil { + sh.options.ResponseErrorHandlerFunc(w, r, fmt.Errorf("unexpected response type: %T", response)) + } +} + +// HandleAlertWebhook operation middleware +func (sh *strictHandler) HandleAlertWebhook(w http.ResponseWriter, r *http.Request) { + var request HandleAlertWebhookRequestObject + + var body HandleAlertWebhookJSONRequestBody + if err := json.NewDecoder(r.Body).Decode(&body); err != nil { + sh.options.RequestErrorHandlerFunc(w, r, fmt.Errorf("can't decode JSON body: %w", err)) + return + } + request.Body = &body + + handler := func(ctx context.Context, w http.ResponseWriter, r *http.Request, request interface{}) (interface{}, error) { + return sh.ssi.HandleAlertWebhook(ctx, request.(HandleAlertWebhookRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "HandleAlertWebhook") + } + + response, err := handler(r.Context(), w, r, request) + + if err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } else if validResponse, ok := response.(HandleAlertWebhookResponseObject); ok { + if err := validResponse.VisitHandleAlertWebhookResponse(w); err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } + } else if response != nil { + sh.options.ResponseErrorHandlerFunc(w, r, fmt.Errorf("unexpected response type: %T", response)) + } +} + +// Health operation middleware +func (sh *strictHandler) Health(w http.ResponseWriter, r *http.Request) { + var request HealthRequestObject + + handler := func(ctx context.Context, w http.ResponseWriter, r *http.Request, request interface{}) (interface{}, error) { + return sh.ssi.Health(ctx, request.(HealthRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "Health") + } + + response, err := handler(r.Context(), w, r, request) + + if err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, 
err) + } else if validResponse, ok := response.(HealthResponseObject); ok { + if err := validResponse.VisitHealthResponse(w); err != nil { + sh.options.ResponseErrorHandlerFunc(w, r, err) + } + } else if response != nil { + sh.options.ResponseErrorHandlerFunc(w, r, fmt.Errorf("unexpected response type: %T", response)) + } +} + +// Base64 encoded, gzipped, json marshaled Swagger object +var swaggerSpec = []string{ + + "H4sIAAAAAAAC/+xa2XPbuBn/VzBoZ7adoSU5SR9Wb47jZN1646ydNA+JpwMRn0isQYAGQDuKx/97Bwcv", + "CdQV5+g0L4lMgN99/PAR9ziVRSkFCKPx9B7rNIeCuJ9HHJS5qDhcwE0F2thnpZIlKMPA7UiloMwwKVaX", + "QJAZB2p/UtCpYqXfh9/nYHJQyOSAiOWAVMUBMY3qVxJsFiXgKZ5JyYEI/JBgJgyoW8JX6b3NAdWrSM6R", + "YQUgI9FNBWqB5nKZU0teG8VEZqlbwYmRKk69XrVUKw1xmiCqAk8/4MzgBGfGPuLG/eNWb3CCBdzgqwh3", + "kyvQueQ0zr5ZRreEV7BWikBbVMUMlKV9xwSVd3HCfm0/mz0kWMFNxZR18Qfcui4w7HisY96urq0l5OxP", + "SI2VtgBDKDEkFmkhSN+xATOdlyCOc6lAomYzenf6otELJ3guVUEMnuKqYjQWCCBumZKi2JJRZ/vOrAQp", + "IM7ArjivbIxbu1OXJF1DyC2vUkPHFzGCpZLWF9voHrbuqPdS3DgjdPXoibDij6QfB7EQ0rJS3h79APLy", + "RZXyUW8kgk+QVsanFpcZmhEN1BtNb1TFM1gVaWlbE+KNqEmnjMY06pRhXUqh4Wcd/lmHO0H4s4r+P1bR", + "rQtfAUaxNC6IX+tHXHi2VPvqtErL6j+VJpk1ZAGFVIvwZyyhvkrNXS2v8Yr5Hma5lNfDRbMA7SQfsIxb", + "7Lv8zpOMuVwbYiodp+XXhkjVltVVmoJ2tlZKqohBB1VlIrP94XIh0mF1SVo3iFUJ/Roy5BoEsj+Gqmqq", + "gBjXGqqS1r9EmhORud8UONinsWjgRBsrItAjM1BhWQHakKKsbWVfQXoh0pjJrWjPSXoNgp4OJNrML6PT", + "F+hvNsPmShZIzrRtUjPGmVnUW/6+Xamwz89kxlLCh3hyv+x42tKxJeVdA2jJMdoZ1lYOwnjUAbHoOa5L", + "/JnMToTxydoPGw63wAc1RX455m2ZDb9Vp17kvW7zihYst9oEiMwQOMGToVb3erBDRHudaxsmJwZlICze", + "AFpzion7JR11iMnGppdKYQgToIZ1a7bsqlCnGW9luW7z3p/VXjBhb/s1HX0rDdv+v6N+paTDDP5VzUAJ", + "MKBRKemepHdBLUsMd+DlIcpWtqrhzK7q7AmY9oyAWClses+2ralTeRp2tiUe2K14t/J7CUSl+WUqS9gM", + "2rfIo/UQd4P5N5/YPaXYWfXEQpdhDOKQzbGkA4HkllEqKXQ7NShk/2PuqAyfSFFyy/T8+eXBvw8Pzg6e", + "PIn3kQF091tVEHGggFB7yg0824bUMvidac1Ehmrt0ZwBpxr9og1R5i0r4BdEBEW/gKDur5gYhhm+VtsO", + "59DKZ4TWA1cLrkhlcqnYZ9/dpZoxSkHYY6w0L2Ul/FREzDlLTT12E4RfOsud7AAlz2Sm/7CwfHDcGxQd", + 
"UEdQf34NCbJylFmTHAnmrGABFc5JxQ2eHk4mSQwBkE+sqArkj9aWGTNQaHuUUGAqZS0T9jgakwQXTIQ/", + "G8bWSpk/mXOZnVkQ4zR0tLyq3hkvTp6/e4UTfPr65TlO8Puji9c4wScXF+cX8RmCf0CUIgvnPnZTwamn", + "alQFFum5ZH+TK6Lj6aj71UAKOJ/j6Yd7/FcFczzFfxm3I/txmNePo7XkIVn/0nuprudc3vXeuXKHSWXO", + "FQXVc4lzB455xe5H0r6w7P7akKR5M2q3JqcGEbAye4fXUglreSVNSPetfrU+PYbKG5eZHgS92onMgKJw", + "0ptXnFsNWv820beVoxvMvhx21utbkardP0zJxoKR8vp3PdwWw7GxGSKYWl8mUME4ZxpSKWjnNN/JPiPN", + "0DDRLXXyvCAmzW1BrlsvA52glJQlUEQMsske4RErdSuKx1y51/Hle8CIWBavKLQeAdwFEheVqHHe/jDg", + "wc2J5zJMpg1JTS2B7dstlnsLpLDcl+3ENDp6c4p0CSmbs5S44QSFOROgndUsVUVS4xEgsVbMbGQQSkoD", + "ChWVNojZPm5B0UdhpBtNZxYnojtmckelI8l5QBoj5OBmjTtSwrnjqF1/KyUTxraaj8KHugtziwEKIkjW", + "PZprj2CCi51w9TCiVPKWUaBo5nOlkLTiMPpoOxdnKYTSEsx1VJI0B/RkZGO7UhxPcW5Mqafj8d3d3Yi4", + "5ZFU2Ti8q8dnp8cnry9PDp6MJqPcFLyDQ/CKzvUcxJY3dBTsd/TmFCf4FpT2LjkcTUaTMKAXpGR4ip+O", + "JqOn9tRNTO4CbExKNr49HFubjJvJXyl1ZNjzR2u9xkwRsOcH/kyKU1q/ZMXEPhRBm+eSLuooA+EYkbLk", + "IWTGf2o/8PL1blM1XME/D/2gD+1bhfLvlH4ymXwN/qHBOAH6ljsbbCQPCX62lTQNwu3hcYw7mHk/7BtC", + "rINfbWPfSvf+uSGi96m4JZxRpFrKzyaHj6RtTdzC8aC4kdcOZNdK9XD446n1bonss8nTR9LpsnLVGH2s", + "JpOn6afFZ/cDUE40EhKVoJyq0jXuWwZ3Ph3lHDWDBTSXMkFvwoF7RlSCGvCBZuSzbfAnnaEMtTBalvZ3", + "x3LtkeXxzPayS/MfXxL17Sny1575OgrETlSPGdieOvLkUaBvGVRFQWwV7ZRLKxaxIPODqwT4ym4MlZfw", + "MieHY//pZOya0HAFPnaDfERE79uv2K4S+5eb79JfqR6vXD/aqh4fPi7/2KeViBePWiOGTyR7FuevWS5/", + "/Xb8O/YgXAGhCwSfmDZ6+4T9hvlVJ0PvA01IsyP/JXJDoo3v7X+2cj74XONgIkfZF+75nlnnX+5n3VeC", + "InuGfvj69wOG/rPvEvpCGjR3I7ofMerrYFwb9QnOINI+XoFZiuKh485KGL8C8+1iuHdtar2zFBjF4PZn", + "+P6PhK8LwQ2xWxJFCjCgtJuv7XBJiNkd9myL6ztGuK7weBmEdLHg8tTkKsFlFUmgd+76RLwTbMog/+6P", + "Cb++ew8K91J+uCT+4fKnjsC9QE99fWnwfPEbEZSDRkaxLAPVXOlq+wQJ/h0Mc0+ie5frCyJ9NfUDJTST", + "dIFSItDMpuMC/fPy/DXyo8wEEY0om89B2fPtssQaFWSBNAja2VSSBZeE6hGOzkW/cf4sX4IbzJ3gUJQ7", + "o/9Mn43ps1eAR/MrB8JNboWOQq3jHNJrR9BvXLoNtnxsGK3mkaf/haHW/5rQ3lZrv5d78RZbXdpcMfml", + "lx4xjWo6zutPv0BIf4+yJ6MsQfgPfVOUSiHAX3wMl+bWXstriVTisVRtKS0Fl/d0al3fCZvgyStHNDy8", + 
"Xx0Q11CW8PajRQtj3NzoIbkfbqH+Y4Yb6UXeD7G7SqErdOzFIP3D1cN/AwAA//+wJDwhdDYAAA==", +} + +// GetSwagger returns the content of the embedded swagger specification file +// or error if failed to decode +func decodeSpec() ([]byte, error) { + zipped, err := base64.StdEncoding.DecodeString(strings.Join(swaggerSpec, "")) + if err != nil { + return nil, fmt.Errorf("error base64 decoding spec: %w", err) + } + zr, err := gzip.NewReader(bytes.NewReader(zipped)) + if err != nil { + return nil, fmt.Errorf("error decompressing spec: %w", err) + } + var buf bytes.Buffer + _, err = buf.ReadFrom(zr) + if err != nil { + return nil, fmt.Errorf("error decompressing spec: %w", err) + } + + return buf.Bytes(), nil +} + +var rawSpec = decodeSpecCached() + +// a naive cached of a decoded swagger spec +func decodeSpecCached() func() ([]byte, error) { + data, err := decodeSpec() + return func() ([]byte, error) { + return data, err + } +} + +// Constructs a synthetic filesystem for resolving external references when loading openapi specifications. +func PathToRawSpec(pathToFile string) map[string]func() ([]byte, error) { + res := make(map[string]func() ([]byte, error)) + if len(pathToFile) > 0 { + res[pathToFile] = rawSpec + } + + return res +} + +// GetSwagger returns the Swagger specification corresponding to the generated code +// in this file. The external references of Swagger specification are resolved. +// The logic of resolving external references is tightly connected to "import-mapping" feature. +// Externally referenced files must be embedded in the corresponding golang packages. +// Urls can be supported but this task was out of the scope. 
func GetSwagger() (swagger *openapi3.T, err error) {
	resolvePath := PathToRawSpec("")

	loader := openapi3.NewLoader()
	loader.IsExternalRefsAllowed = true
	loader.ReadFromURIFunc = func(loader *openapi3.Loader, url *url.URL) ([]byte, error) {
		pathToFile := url.String()
		pathToFile = path.Clean(pathToFile)
		getSpec, ok := resolvePath[pathToFile]
		if !ok {
			err1 := fmt.Errorf("path not found: %s", pathToFile)
			return nil, err1
		}
		return getSpec()
	}
	var specData []byte
	specData, err = rawSpec()
	if err != nil {
		return
	}
	swagger, err = loader.LoadFromData(specData)
	if err != nil {
		return
	}
	return
}
diff --git a/observability-logs-opensearch/internal/config.go b/observability-logs-opensearch/internal/config.go
new file mode 100644
index 0000000..ca41526
--- /dev/null
+++ b/observability-logs-opensearch/internal/config.go
@@ -0,0 +1,100 @@
// Copyright 2026 The OpenChoreo Authors
// SPDX-License-Identifier: Apache-2.0

package app

import (
	"fmt"
	"log/slog"
	"net/url"
	"os"
	"strconv"
	"strings"
)

// Config holds the application configuration.
// All values are sourced from environment variables by LoadConfig.
type Config struct {
	// ServerPort is the HTTP listen port (validated as numeric in LoadConfig).
	ServerPort string
	// OpenSearchAddress is the OpenSearch endpoint URL (required).
	OpenSearchAddress string
	// OpenSearchUsername is the basic-auth username for OpenSearch (required).
	OpenSearchUsername string
	// OpenSearchPassword is the basic-auth password for OpenSearch (required).
	OpenSearchPassword string
	// OpenSearchIndexPrefix is the prefix of the log indices to query.
	OpenSearchIndexPrefix string
	// TLSSkipVerify disables TLS certificate verification for OpenSearch connections.
	TLSSkipVerify bool
	// ObserverURL is the base URL of the observer API that alerts are forwarded to (required).
	ObserverURL string
	// LogLevel is the slog level for the adapter's own logging.
	LogLevel slog.Level
}

// LoadConfig loads configuration from environment variables.
// LoadConfig reads all configuration from environment variables, applies
// defaults for optional values, validates required values, and returns the
// assembled Config. It returns an error naming the first offending variable.
func LoadConfig() (*Config, error) {
	serverPort := getEnv("SERVER_PORT", "9098")
	openSearchAddress := getEnv("OPENSEARCH_ADDRESS", "")
	openSearchUsername := getEnv("OPENSEARCH_USERNAME", "")
	openSearchPassword := getEnv("OPENSEARCH_PASSWORD", "")
	openSearchIndexPrefix := getEnv("OPENSEARCH_INDEX_PREFIX", "container-logs-")
	observerURL := getEnv("OBSERVER_URL", "")

	// NOTE(review): TLS verification is skipped by default (true) unless
	// OPENSEARCH_TLS_SKIP_VERIFY explicitly disables it. This is insecure by
	// default — confirm this is intentional for in-cluster deployments.
	tlsSkipVerify := true
	if v := os.Getenv("OPENSEARCH_TLS_SKIP_VERIFY"); v != "" {
		parsed, err := strconv.ParseBool(v)
		if err != nil {
			return nil, fmt.Errorf("invalid OPENSEARCH_TLS_SKIP_VERIFY value: %w", err)
		}
		tlsSkipVerify = parsed
	}

	// Unrecognized LOG_LEVEL values silently fall back to Info.
	logLevel := slog.LevelInfo
	if level := os.Getenv("LOG_LEVEL"); level != "" {
		switch strings.ToUpper(level) {
		case "DEBUG":
			logLevel = slog.LevelDebug
		case "INFO":
			logLevel = slog.LevelInfo
		case "WARN", "WARNING":
			logLevel = slog.LevelWarn
		case "ERROR":
			logLevel = slog.LevelError
		}
	}

	// Required variables are validated after all reads so optional parsing
	// errors (above) surface first.
	if openSearchAddress == "" {
		return nil, fmt.Errorf("environment variable OPENSEARCH_ADDRESS is required")
	}

	if openSearchUsername == "" {
		return nil, fmt.Errorf("environment variable OPENSEARCH_USERNAME is required")
	}

	if openSearchPassword == "" {
		return nil, fmt.Errorf("environment variable OPENSEARCH_PASSWORD is required")
	}

	if observerURL == "" {
		return nil, fmt.Errorf("environment variable OBSERVER_URL is required")
	}
	parsedURL, err := url.Parse(observerURL)
	if err != nil || parsedURL.Scheme == "" || parsedURL.Host == "" {
		return nil, fmt.Errorf("OBSERVER_URL must be a valid URL with scheme and host, got: %q", observerURL)
	}

	if _, err := strconv.Atoi(serverPort); err != nil {
		return nil, fmt.Errorf("invalid SERVER_PORT: %w", err)
	}

	return &Config{
		ServerPort:            serverPort,
		OpenSearchAddress:     openSearchAddress,
		OpenSearchUsername:    openSearchUsername,
		OpenSearchPassword:    openSearchPassword,
		OpenSearchIndexPrefix: openSearchIndexPrefix,
		TLSSkipVerify:         tlsSkipVerify,
		ObserverURL:           observerURL,
		LogLevel:              logLevel,
	}, nil
}

// getEnv returns the value of the environment variable key, or defaultValue
// when the variable is unset or empty.
func getEnv(key, defaultValue string) string {
	if value := os.Getenv(key); value != "" {
		return value
	}
	return defaultValue
}
diff --git a/observability-logs-opensearch/internal/handlers.go b/observability-logs-opensearch/internal/handlers.go
new file mode 100644
index 0000000..bc80abb
--- /dev/null
+++ b/observability-logs-opensearch/internal/handlers.go
@@ -0,0 +1,878 @@
// Copyright 2026 The OpenChoreo Authors
// SPDX-License-Identifier: Apache-2.0

package app

import (
	"context"
	"fmt"
	"log/slog"
	"strconv"
	"strings"
	"time"

	"github.com/google/uuid"
	openapi_types "github.com/oapi-codegen/runtime/types"

	"github.com/openchoreo/community-modules/observability-logs-opensearch/internal/api/gen"
	"github.com/openchoreo/community-modules/observability-logs-opensearch/internal/observer"
	"github.com/openchoreo/community-modules/observability-logs-opensearch/internal/opensearch"
)

// LogsHandler implements the generated StrictServerInterface.
// It translates API requests into OpenSearch queries/monitor operations and
// forwards alert webhooks to the observer API.
type LogsHandler struct {
	osClient       *opensearch.Client       // OpenSearch search + alerting-monitor client
	queryBuilder   *opensearch.QueryBuilder // builds query bodies and index lists
	observerClient *observer.Client         // receives forwarded alert webhooks
	logger         *slog.Logger
}

// NewLogsHandler creates a new LogsHandler wiring together the OpenSearch
// client, query builder, observer client, and logger.
func NewLogsHandler(
	osClient *opensearch.Client,
	queryBuilder *opensearch.QueryBuilder,
	observerClient *observer.Client,
	logger *slog.Logger,
) *LogsHandler {
	return &LogsHandler{
		osClient:       osClient,
		queryBuilder:   queryBuilder,
		observerClient: observerClient,
		logger:         logger,
	}
}

// Ensure LogsHandler implements the interface at compile time.
var _ gen.StrictServerInterface = (*LogsHandler)(nil)

// Health implements the health check endpoint. It always reports healthy;
// backend (OpenSearch/observer) reachability is not probed here.
func (h *LogsHandler) Health(_ context.Context, _ gen.HealthRequestObject) (gen.HealthResponseObject, error) {
	status := "healthy"
	return gen.Health200JSONResponse{Status: &status}, nil
}

// QueryLogs implements POST /api/v1/logs/query.
// QueryLogs implements POST /api/v1/logs/query. The request's searchScope is
// a union type: it is treated as a workflow scope when it parses as
// WorkflowSearchScope AND carries a workflowRunName; otherwise it falls back
// to a component scope. Both paths require a non-blank namespace.
func (h *LogsHandler) QueryLogs(ctx context.Context, request gen.QueryLogsRequestObject) (gen.QueryLogsResponseObject, error) {
	if request.Body == nil {
		return gen.QueryLogs400JSONResponse{
			Title:   ptr(gen.BadRequest),
			Message: ptr("request body is required"),
		}, nil
	}

	// Try to interpret the search scope as a WorkflowSearchScope first.
	// NOTE(review): AsWorkflowSearchScope may succeed on any JSON object, so
	// the WorkflowRunName != nil check is what actually disambiguates — confirm
	// against the generated union accessors.
	workflowScope, err := request.Body.SearchScope.AsWorkflowSearchScope()
	if err == nil && workflowScope.WorkflowRunName != nil {
		if strings.TrimSpace(workflowScope.Namespace) == "" {
			return gen.QueryLogs400JSONResponse{
				Title:   ptr(gen.BadRequest),
				Message: ptr("searchScope with a valid namespace is required"),
			}, nil
		}

		return h.queryWorkflowLogs(ctx, request.Body, &workflowScope)
	}

	// Fall back to ComponentSearchScope
	scope, err := request.Body.SearchScope.AsComponentSearchScope()
	if err != nil || strings.TrimSpace(scope.Namespace) == "" {
		return gen.QueryLogs400JSONResponse{
			Title:   ptr(gen.BadRequest),
			Message: ptr("searchScope with a valid namespace is required"),
		}, nil
	}

	return h.queryComponentLogs(ctx, request.Body, &scope)
}

// queryComponentLogs executes a component-scoped log search: it builds the
// query and index list for the request's time range, runs the search, and
// maps hits to ComponentLogEntry values. All failures are logged and returned
// to the caller as opaque 500 responses.
// NOTE(review): unlike queryWorkflowLogs, no default Limit/SortOrder is set
// here — presumably BuildComponentLogsQueryV1 applies defaults; verify.
func (h *LogsHandler) queryComponentLogs(ctx context.Context, req *gen.LogsQueryRequest, scope *gen.ComponentSearchScope) (gen.QueryLogsResponseObject, error) {
	startTime := req.StartTime.Format(time.RFC3339)
	endTime := req.EndTime.Format(time.RFC3339)

	params := opensearch.ComponentLogsQueryParamsV1{
		StartTime:     startTime,
		EndTime:       endTime,
		NamespaceName: scope.Namespace,
	}
	if scope.ProjectUid != nil {
		params.ProjectID = *scope.ProjectUid
	}
	if scope.EnvironmentUid != nil {
		params.EnvironmentID = *scope.EnvironmentUid
	}
	if scope.ComponentUid != nil {
		params.ComponentID = *scope.ComponentUid
	}
	if req.Limit != nil {
		params.Limit = *req.Limit
	}
	if req.SortOrder != nil {
		params.SortOrder = string(*req.SortOrder)
	}
	if req.SearchPhrase != nil {
		params.SearchPhrase = *req.SearchPhrase
	}
	if req.LogLevels != nil {
		levels := make([]string, len(*req.LogLevels))
		for i, l := range *req.LogLevels {
			levels[i] = string(l)
		}
		params.LogLevels = levels
	}

	query, err := h.queryBuilder.BuildComponentLogsQueryV1(params)
	if err != nil {
		h.logger.Error("Failed to build component logs query",
			slog.String("function", "QueryLogs"),
			slog.Any("error", err),
		)
		return gen.QueryLogs500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	indices, err := h.queryBuilder.GenerateIndices(startTime, endTime)
	if err != nil {
		h.logger.Error("Failed to generate indices",
			slog.String("function", "QueryLogs"),
			slog.Any("error", err),
		)
		return gen.QueryLogs500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	result, err := h.osClient.Search(ctx, indices, query)
	if err != nil {
		h.logger.Error("Failed to query component logs",
			slog.String("function", "QueryLogs"),
			slog.String("namespace", scope.Namespace),
			slog.Any("error", err),
		)
		return gen.QueryLogs500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	entries := make([]gen.ComponentLogEntry, 0, len(result.Hits.Hits))
	for _, hit := range result.Hits.Hits {
		logEntry := opensearch.ParseLogEntry(hit)
		entry := toComponentLogEntry(&logEntry)
		entries = append(entries, entry)
	}

	total := result.Hits.Total.Value
	took := result.Took
	resp := gen.LogsQueryResponse{
		Total:  &total,
		TookMs: &took,
	}
	logs := gen.LogsQueryResponse_Logs{}
	// Error deliberately discarded: setting the union from an in-memory slice
	// of generated entries is not expected to fail.
	_ = logs.FromLogsQueryResponseLogs0(entries)
	resp.Logs = &logs

	return gen.QueryLogs200JSONResponse(resp), nil
}

// queryWorkflowLogs executes a workflow-run-scoped log search. Limit defaults
// to 100 and SortOrder to "desc" unless overridden by the request. Hits are
// mapped to WorkflowLogEntry values (timestamp + raw log line only).
func (h *LogsHandler) queryWorkflowLogs(ctx context.Context, req *gen.LogsQueryRequest, scope *gen.WorkflowSearchScope) (gen.QueryLogsResponseObject, error) {
	startTime := req.StartTime.Format(time.RFC3339)
	endTime := req.EndTime.Format(time.RFC3339)

	queryParams := opensearch.QueryParams{
		StartTime:     startTime,
		EndTime:       endTime,
		NamespaceName: scope.Namespace,
		Limit:         100,
		SortOrder:     "desc",
	}
	if req.Limit != nil {
		queryParams.Limit = *req.Limit
	}
	if req.SortOrder != nil {
		queryParams.SortOrder = string(*req.SortOrder)
	}
	if req.SearchPhrase != nil {
		queryParams.SearchPhrase = *req.SearchPhrase
	}
	if req.LogLevels != nil {
		levels := make([]string, len(*req.LogLevels))
		for i, l := range *req.LogLevels {
			levels[i] = string(l)
		}
		queryParams.LogLevels = levels
	}

	// WorkflowRunName was checked non-nil by QueryLogs, but guard anyway.
	workflowRunName := ""
	if scope.WorkflowRunName != nil {
		workflowRunName = *scope.WorkflowRunName
	}

	params := opensearch.WorkflowRunQueryParams{
		QueryParams:   queryParams,
		WorkflowRunID: workflowRunName,
	}

	query := h.queryBuilder.BuildWorkflowRunLogsQuery(params)

	indices, err := h.queryBuilder.GenerateIndices(startTime, endTime)
	if err != nil {
		h.logger.Error("Failed to generate indices",
			slog.String("function", "QueryLogs"),
			slog.Any("error", err),
		)
		return gen.QueryLogs500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	result, err := h.osClient.Search(ctx, indices, query)
	if err != nil {
		h.logger.Error("Failed to query workflow logs",
			slog.String("function", "QueryLogs"),
			slog.String("namespace", scope.Namespace),
			slog.Any("error", err),
		)
		return gen.QueryLogs500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	entries := make([]gen.WorkflowLogEntry, 0, len(result.Hits.Hits))
	for _, hit := range result.Hits.Hits {
		// Unparseable or missing @timestamp leaves the entry timestamp nil.
		var ts *time.Time
		if tsVal, ok := hit.Source["@timestamp"].(string); ok {
			if parsed, err := time.Parse(time.RFC3339, tsVal); err == nil {
				ts = &parsed
			}
		}
		log := ""
		if logVal, ok := hit.Source["log"].(string); ok {
			log = logVal
		}
		entry := gen.WorkflowLogEntry{
			Timestamp: ts,
			Log:       &log,
		}
		entries = append(entries, entry)
	}

	total := result.Hits.Total.Value
	took := result.Took
	resp := gen.LogsQueryResponse{
		Total:  &total,
		TookMs: &took,
	}
	// Error deliberately discarded: setting the union from an in-memory slice
	// of generated entries is not expected to fail.
	_ = logs.FromLogsQueryResponseLogs1(entries)
	resp.Logs = &logs

	return gen.QueryLogs200JSONResponse(resp), nil
}

// CreateAlertRule implements POST /api/v1alpha1/alerts/rules. It translates
// the request into an OpenSearch alerting monitor and creates it, returning
// the monitor ID as the backend rule ID.
func (h *LogsHandler) CreateAlertRule(ctx context.Context, request gen.CreateAlertRuleRequestObject) (gen.CreateAlertRuleResponseObject, error) {
	if request.Body == nil {
		return gen.CreateAlertRule400JSONResponse{
			Title:   ptr(gen.BadRequest),
			Message: ptr("request body is required"),
		}, nil
	}

	params := toAlertingRuleRequest(request.Body)

	monitorBody, err := h.queryBuilder.BuildLogAlertingRuleMonitorBody(params)
	if err != nil {
		h.logger.Error("Failed to build monitor body",
			slog.String("function", "CreateAlertRule"),
			slog.Any("error", err),
		)
		return gen.CreateAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	alertID, _, err := h.osClient.CreateMonitor(ctx, monitorBody)
	if err != nil {
		h.logger.Error("Failed to create monitor",
			slog.String("function", "CreateAlertRule"),
			slog.Any("alertName", params.Metadata.Name),
			slog.Any("error", err),
		)
		return gen.CreateAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	now := time.Now().UTC().Format(time.RFC3339)
	return gen.CreateAlertRule201JSONResponse{
		Action:        ptr(gen.Created),
		Status:        ptr(gen.Synced),
		RuleLogicalId: &params.Metadata.Name,
		RuleBackendId: &alertID,
		LastSyncedAt:  &now,
	}, nil
}

// DeleteAlertRule implements DELETE /api/v1alpha1/alerts/rules/{ruleName}.
// DeleteAlertRule looks up the monitor backing ruleName and deletes it.
// Lookup and deletion failures are logged and surfaced as opaque 500s.
func (h *LogsHandler) DeleteAlertRule(ctx context.Context, request gen.DeleteAlertRuleRequestObject) (gen.DeleteAlertRuleResponseObject, error) {
	monitorID, found, err := h.osClient.SearchMonitorByName(ctx, request.RuleName)
	if err != nil {
		h.logger.Error("Failed to search for monitor",
			slog.String("function", "DeleteAlertRule"),
			slog.String("ruleName", request.RuleName),
			slog.Any("error", err),
		)
		return gen.DeleteAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}
	// NOTE(review): a missing rule is reported as 500, making delete
	// non-idempotent (a retry after success fails). GetAlertRule maps the same
	// condition to 404 — consider 404 here too, or a 200 no-op, if the
	// generated API offers it.
	if !found {
		return gen.DeleteAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("alert rule not found"),
		}, nil
	}

	if err := h.osClient.DeleteMonitor(ctx, monitorID); err != nil {
		h.logger.Error("Failed to delete monitor",
			slog.String("function", "DeleteAlertRule"),
			slog.String("ruleName", request.RuleName),
			slog.Any("error", err),
		)
		return gen.DeleteAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	now := time.Now().UTC().Format(time.RFC3339)
	return gen.DeleteAlertRule200JSONResponse{
		Action:        ptr(gen.Deleted),
		Status:        ptr(gen.Synced),
		RuleLogicalId: &request.RuleName,
		RuleBackendId: &monitorID,
		LastSyncedAt:  &now,
	}, nil
}

// GetAlertRule implements GET /api/v1alpha1/alerts/rules/{ruleName}.
// GetAlertRule resolves ruleName to a monitor ID, fetches the monitor, and
// converts it back into the API's AlertRuleResponse shape. Returns 404 when
// no monitor with that name exists and 500 on any backend or parse failure.
func (h *LogsHandler) GetAlertRule(ctx context.Context, request gen.GetAlertRuleRequestObject) (gen.GetAlertRuleResponseObject, error) {
	monitorID, found, err := h.osClient.SearchMonitorByName(ctx, request.RuleName)
	if err != nil {
		h.logger.Error("Failed to search for monitor",
			slog.String("function", "GetAlertRule"),
			slog.String("ruleName", request.RuleName),
			slog.Any("error", err),
		)
		return gen.GetAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}
	if !found {
		return gen.GetAlertRule404JSONResponse{
			Title:   ptr(gen.NotFound),
			Message: ptr("alert rule not found"),
		}, nil
	}

	monitor, err := h.osClient.GetMonitorByID(ctx, monitorID)
	if err != nil {
		h.logger.Error("Failed to get monitor",
			slog.String("function", "GetAlertRule"),
			slog.String("ruleName", request.RuleName),
			slog.Any("error", err),
		)
		return gen.GetAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	response, err := parseMonitorToAlertRuleResponse(monitor)
	if err != nil {
		h.logger.Error("Failed to parse monitor to alert rule response",
			slog.String("function", "GetAlertRule"),
			slog.String("ruleName", request.RuleName),
			slog.Any("error", err),
		)
		return gen.GetAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	return gen.GetAlertRule200JSONResponse(response), nil
}

// UpdateAlertRule implements PUT /api/v1alpha1/alerts/rules/{ruleName}.
// UpdateAlertRule rebuilds the monitor body from the request and overwrites
// the existing monitor identified by ruleName.
func (h *LogsHandler) UpdateAlertRule(ctx context.Context, request gen.UpdateAlertRuleRequestObject) (gen.UpdateAlertRuleResponseObject, error) {
	if request.Body == nil {
		return gen.UpdateAlertRule400JSONResponse{
			Title:   ptr(gen.BadRequest),
			Message: ptr("request body is required"),
		}, nil
	}

	monitorID, found, err := h.osClient.SearchMonitorByName(ctx, request.RuleName)
	if err != nil {
		h.logger.Error("Failed to search for monitor",
			slog.String("function", "UpdateAlertRule"),
			slog.String("ruleName", request.RuleName),
			slog.Any("error", err),
		)
		return gen.UpdateAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}
	// NOTE(review): a missing rule is reported as 400 here but as 404 in
	// GetAlertRule — confirm whether the generated API offers an
	// UpdateAlertRule404 response for consistency.
	if !found {
		return gen.UpdateAlertRule400JSONResponse{
			Title:   ptr(gen.BadRequest),
			Message: ptr("alert rule not found"),
		}, nil
	}

	params := toAlertingRuleRequest(request.Body)

	monitorBody, err := h.queryBuilder.BuildLogAlertingRuleMonitorBody(params)
	if err != nil {
		h.logger.Error("Failed to build monitor body",
			slog.String("function", "UpdateAlertRule"),
			slog.Any("error", err),
		)
		return gen.UpdateAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	if _, err := h.osClient.UpdateMonitor(ctx, monitorID, monitorBody); err != nil {
		h.logger.Error("Failed to update monitor",
			slog.String("function", "UpdateAlertRule"),
			slog.String("ruleName", request.RuleName),
			slog.Any("error", err),
		)
		return gen.UpdateAlertRule500JSONResponse{
			Title:   ptr(gen.InternalServerError),
			Message: ptr("internal server error"),
		}, nil
	}

	now := time.Now().UTC().Format(time.RFC3339)
	return gen.UpdateAlertRule200JSONResponse{
		Action:        ptr(gen.Updated),
		Status:        ptr(gen.Synced),
		RuleLogicalId: &request.RuleName,
		RuleBackendId: &monitorID,
		LastSyncedAt:  &now,
	}, nil
}

// HandleAlertWebhook implements POST /api/v1alpha1/alerts/webhook.
// HandleAlertWebhook accepts an alert notification from OpenSearch alerting
// and forwards it asynchronously to the observer API. It always acknowledges
// with 200 — even on nil or unparseable bodies — presumably so the alerting
// backend does not retry; failures are only logged.
func (h *LogsHandler) HandleAlertWebhook(_ context.Context, request gen.HandleAlertWebhookRequestObject) (gen.HandleAlertWebhookResponseObject, error) {
	if request.Body == nil {
		h.logger.Warn("Alert webhook received with nil body")
		return gen.HandleAlertWebhook200JSONResponse{
			Message: ptr("alert webhook received successfully"),
			Status:  ptr(gen.Success),
		}, nil
	}
	body := *request.Body

	ruleName, ruleNamespace, alertValue, alertTimestamp, err := parseAlertWebhookBody(body)
	if err != nil {
		h.logger.Error("Failed to parse alert webhook body", slog.Any("error", err))
		return gen.HandleAlertWebhook200JSONResponse{
			Message: ptr("alert webhook received successfully"),
			Status:  ptr(gen.Success),
		}, nil
	}

	// Fire-and-forget forward with its own 10s deadline; the request context
	// is deliberately not used so the forward survives the HTTP response.
	// NOTE(review): this goroutine is untracked — in-flight forwards may be
	// dropped on process shutdown.
	go func() {
		forwardCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
		defer cancel()

		if err := h.observerClient.ForwardAlert(forwardCtx, ruleName, ruleNamespace, alertValue, alertTimestamp); err != nil {
			h.logger.Error("Failed to forward alert webhook to observer API",
				slog.Any("error", err),
			)
		}
	}()

	return gen.HandleAlertWebhook200JSONResponse{
		Message: ptr("alert webhook received successfully"),
		Status:  ptr(gen.Success),
	}, nil
}

// parseAlertWebhookBody extracts alert fields from the incoming OpenSearch alerting webhook body.
func parseAlertWebhookBody(body map[string]interface{}) (ruleName string, ruleNamespace string, alertValue float64, alertTimestamp time.Time, err error) {
	// ruleName is the only mandatory field and must be a string.
	raw, present := body["ruleName"]
	if !present {
		return "", "", 0, time.Time{}, fmt.Errorf("missing ruleName in webhook body")
	}
	name, isString := raw.(string)
	if !isString {
		return "", "", 0, time.Time{}, fmt.Errorf("ruleName is not a string")
	}
	ruleName = name

	// ruleNamespace is optional; a missing key or non-string value leaves it empty.
	ruleNamespace, _ = body["ruleNamespace"].(string)

	// alertValue may arrive as a JSON number (float64) or a numeric string.
	// Other types are silently ignored, leaving the value at zero.
	switch v := body["alertValue"].(type) {
	case float64:
		alertValue = v
	case string:
		parsed, parseErr := strconv.ParseFloat(v, 64)
		if parseErr != nil {
			return "", "", 0, time.Time{}, fmt.Errorf("failed to parse alertValue %q: %w", v, parseErr)
		}
		alertValue = parsed
	}

	// alertTimestamp defaults to "now" unless a valid RFC3339 string is supplied.
	alertTimestamp = time.Now()
	if s, isStr := body["alertTimestamp"].(string); isStr {
		if parsed, parseErr := time.Parse(time.RFC3339, s); parseErr == nil {
			alertTimestamp = parsed
		}
	}

	return ruleName, ruleNamespace, alertValue, alertTimestamp, nil
}

// toAlertingRuleRequest converts the generated AlertRuleRequest to the internal type.
// toAlertingRuleRequest converts the generated AlertRuleRequest to the
// internal opensearch request type, stringifying the UUID fields and widening
// the threshold to float64.
func toAlertingRuleRequest(req *gen.AlertRuleRequest) opensearch.AlertingRuleRequest {
	return opensearch.AlertingRuleRequest{
		Metadata: opensearch.AlertingRuleMetadata{
			Name:           req.Metadata.Name,
			Namespace:      req.Metadata.Namespace,
			ProjectUID:     req.Metadata.ProjectUid.String(),
			EnvironmentUID: req.Metadata.EnvironmentUid.String(),
			ComponentUID:   req.Metadata.ComponentUid.String(),
		},
		Source: opensearch.AlertingRuleSource{
			Query: req.Source.Query,
		},
		Condition: opensearch.AlertingRuleCondition{
			Enabled:   req.Condition.Enabled,
			Window:    req.Condition.Window,
			Interval:  req.Condition.Interval,
			Operator:  string(req.Condition.Operator),
			Threshold: float64(req.Condition.Threshold),
		},
	}
}

// parseMonitorToAlertRuleResponse parses an OpenSearch monitor to the API AlertRuleResponse.
// It reverse-engineers the rule's query, scope UIDs, trigger condition,
// schedule interval, and time window from the raw monitor document. Fields it
// cannot find are left at their zero values; the error return is currently
// always nil.
func parseMonitorToAlertRuleResponse(monitor map[string]interface{}) (gen.AlertRuleResponse, error) {
	name := getStringFromMap(monitor, "name")

	// Extract metadata from the monitor's trigger action message template
	var namespace, projectUID, environmentUID, componentUID, searchQuery string
	var enabled bool
	var threshold float32
	var operator, window, interval string

	if enabledVal, ok := monitor["enabled"].(bool); ok {
		enabled = enabledVal
	}

	// Extract query and metadata from inputs
	// NOTE(review): extractQueryMetadata never assigns namespace (see below),
	// so Metadata.Namespace ends up as a pointer to "" — verify whether a
	// namespace term filter should be recognized here.
	if inputs, ok := monitor["inputs"].([]interface{}); ok && len(inputs) > 0 {
		if input, ok := inputs[0].(map[string]interface{}); ok {
			if search, ok := input["search"].(map[string]interface{}); ok {
				if queryMap, ok := search["query"].(map[string]interface{}); ok {
					searchQuery, namespace, projectUID, environmentUID, componentUID = extractQueryMetadata(queryMap)
				}
			}
		}
	}

	// Extract trigger condition (only the first trigger is considered).
	if triggers, ok := monitor["triggers"].([]interface{}); ok && len(triggers) > 0 {
		if trigger, ok := triggers[0].(map[string]interface{}); ok {
			if qlt, ok := trigger["query_level_trigger"].(map[string]interface{}); ok {
				if condition, ok := qlt["condition"].(map[string]interface{}); ok {
					if script, ok := condition["script"].(map[string]interface{}); ok {
						if source, ok := script["source"].(string); ok {
							operator, threshold = parseConditionScript(source)
						}
					}
				}
			}
		}
	}

	// Extract schedule for interval
	if schedule, ok := monitor["schedule"].(map[string]interface{}); ok {
		if period, ok := schedule["period"].(map[string]interface{}); ok {
			intervalVal := 0.0
			if v, ok := period["interval"].(float64); ok {
				intervalVal = v
			}
			unit := getStringFromMap(period, "unit")
			interval = formatScheduleToInterval(intervalVal, unit)
		}
	}

	// Extract window from query time range
	window = extractWindowFromQuery(monitor)

	operatorEnum := gen.AlertRuleResponseConditionOperator(opensearch.ReverseMapOperator(operator))

	// Anonymous struct shapes below mirror the generated AlertRuleResponse fields.
	metadata := &struct {
		ComponentUid   *openapi_types.UUID `json:"componentUid,omitempty"`
		EnvironmentUid *openapi_types.UUID `json:"environmentUid,omitempty"`
		Name           *string             `json:"name,omitempty"`
		Namespace      *string             `json:"namespace,omitempty"`
		ProjectUid     *openapi_types.UUID `json:"projectUid,omitempty"`
	}{
		Name:      &name,
		Namespace: strPtr(namespace),
	}
	// UID fields are set only when present and parseable as UUIDs.
	if projectUID != "" {
		if uid, ok := parseUUID(projectUID); ok {
			metadata.ProjectUid = &uid
		}
	}
	if environmentUID != "" {
		if uid, ok := parseUUID(environmentUID); ok {
			metadata.EnvironmentUid = &uid
		}
	}
	if componentUID != "" {
		if uid, ok := parseUUID(componentUID); ok {
			metadata.ComponentUid = &uid
		}
	}

	response := gen.AlertRuleResponse{
		Metadata: metadata,
		Source: &struct {
			Metric *gen.AlertRuleResponseSourceMetric `json:"metric,omitempty"`
			Query  *string                            `json:"query,omitempty"`
		}{
			// This adapter only manages log-based rules, so the metric is fixed.
			Metric: ptr(gen.AlertRuleResponseSourceMetric("log")),
			Query:  &searchQuery,
		},
		Condition: &struct {
			Enabled   *bool                                   `json:"enabled,omitempty"`
			Interval  *string                                 `json:"interval,omitempty"`
			Operator  *gen.AlertRuleResponseConditionOperator `json:"operator,omitempty"`
			Threshold *float32                                `json:"threshold,omitempty"`
			Window    *string                                 `json:"window,omitempty"`
		}{
			Enabled:   &enabled,
			Operator:  &operatorEnum,
			Threshold: &threshold,
			Window:    &window,
			Interval:  &interval,
		},
	}

	return response, nil
}

// extractQueryMetadata extracts metadata from the monitor's search query.
// It walks the bool/filter clauses looking for term filters carrying the
// scope UIDs and a wildcard filter on "log" carrying the search phrase.
// NOTE(review): the namespace named return is declared but never assigned —
// no term key maps to it — so callers always receive "".
func extractQueryMetadata(queryMap map[string]interface{}) (searchQuery, namespace, projectUID, environmentUID, componentUID string) {
	boolQuery, ok := queryMap["query"].(map[string]interface{})
	if !ok {
		return
	}
	boolMap, ok := boolQuery["bool"].(map[string]interface{})
	if !ok {
		return
	}
	filters, ok := boolMap["filter"].([]interface{})
	if !ok {
		return
	}

	for _, f := range filters {
		filter, ok := f.(map[string]interface{})
		if !ok {
			continue
		}
		if termMap, ok := filter["term"].(map[string]interface{}); ok {
			for key, val := range termMap {
				valMap, ok := val.(map[string]interface{})
				if !ok {
					continue
				}
				value := getStringFromMap(valMap, "value")
				switch key {
				case opensearch.OSComponentID:
					componentUID = value
				case opensearch.OSEnvironmentID:
					environmentUID = value
				case opensearch.OSProjectID:
					projectUID = value
				}
			}
		}
		if wildcardMap, ok := filter["wildcard"].(map[string]interface{}); ok {
			if logMap, ok := wildcardMap["log"].(map[string]interface{}); ok {
				if pattern, ok := logMap["wildcard"].(string); ok {
					// Remove leading/trailing * from pattern
					searchQuery = strings.TrimPrefix(strings.TrimSuffix(pattern, "*"), "*")
				}
			}
		}
	}

	return searchQuery, namespace, projectUID, environmentUID, componentUID
}

// parseConditionScript parses the trigger condition script to extract operator and threshold.
+func parseConditionScript(source string) (string, float32) { + // Format: "ctx.results[0].hits.total.value > 10" + parts := strings.Fields(source) + if len(parts) < 3 { + return "", 0 + } + operator := parts[len(parts)-2] + thresholdStr := parts[len(parts)-1] + threshold, _ := strconv.ParseFloat(thresholdStr, 32) + return operator, float32(threshold) +} + +// formatScheduleToInterval converts schedule period to a duration string. +func formatScheduleToInterval(interval float64, unit string) string { + switch strings.ToUpper(unit) { + case "MINUTES": + return fmt.Sprintf("%dm", int(interval)) + case "HOURS": + return fmt.Sprintf("%dh", int(interval)) + } + return fmt.Sprintf("%dm", int(interval)) +} + +// extractWindowFromQuery extracts the window duration from the monitor's query time range. +func extractWindowFromQuery(monitor map[string]interface{}) string { + if inputs, ok := monitor["inputs"].([]interface{}); ok && len(inputs) > 0 { + if input, ok := inputs[0].(map[string]interface{}); ok { + if search, ok := input["search"].(map[string]interface{}); ok { + if queryMap, ok := search["query"].(map[string]interface{}); ok { + if boolQuery, ok := queryMap["query"].(map[string]interface{}); ok { + if boolMap, ok := boolQuery["bool"].(map[string]interface{}); ok { + if filters, ok := boolMap["filter"].([]interface{}); ok { + for _, f := range filters { + filter, ok := f.(map[string]interface{}) + if !ok { + continue + } + if rangeMap, ok := filter["range"].(map[string]interface{}); ok { + if tsMap, ok := rangeMap["@timestamp"].(map[string]interface{}); ok { + if from, ok := tsMap["from"].(string); ok { + // Format: "{{period_end}}||-1h" + if idx := strings.Index(from, "||-"); idx != -1 { + return from[idx+3:] + } + } + } + } + } + } + } + } + } + } + } + } + return "" +} + +func toComponentLogEntry(l *opensearch.LogEntry) gen.ComponentLogEntry { + ts := l.Timestamp + entry := gen.ComponentLogEntry{ + Timestamp: &ts, + Log: &l.Log, + Level: &l.LogLevel, + Metadata: 
&struct { + ComponentName *string `json:"componentName,omitempty"` + ComponentUid *openapi_types.UUID `json:"componentUid,omitempty"` + ContainerName *string `json:"containerName,omitempty"` + EnvironmentName *string `json:"environmentName,omitempty"` + EnvironmentUid *openapi_types.UUID `json:"environmentUid,omitempty"` + NamespaceName *string `json:"namespaceName,omitempty"` + PodName *string `json:"podName,omitempty"` + PodNamespace *string `json:"podNamespace,omitempty"` + ProjectName *string `json:"projectName,omitempty"` + ProjectUid *openapi_types.UUID `json:"projectUid,omitempty"` + }{ + NamespaceName: strPtr(l.NamespaceName), + ContainerName: strPtr(l.ContainerName), + PodName: strPtr(l.PodName), + PodNamespace: strPtr(l.PodNamespace), + ComponentName: strPtr(l.ComponentName), + ProjectName: strPtr(l.ProjectName), + EnvironmentName: strPtr(l.EnvironmentName), + }, + } + + if l.ComponentID != "" { + if uid, ok := parseUUID(l.ComponentID); ok { + entry.Metadata.ComponentUid = &uid + } + } + if l.ProjectID != "" { + if uid, ok := parseUUID(l.ProjectID); ok { + entry.Metadata.ProjectUid = &uid + } + } + if l.EnvironmentID != "" { + if uid, ok := parseUUID(l.EnvironmentID); ok { + entry.Metadata.EnvironmentUid = &uid + } + } + + return entry +} + +func getStringFromMap(m map[string]interface{}, key string) string { + if val, ok := m[key].(string); ok { + return val + } + return "" +} + +func parseUUID(s string) (openapi_types.UUID, bool) { + parsed, err := uuid.Parse(s) + if err != nil { + return openapi_types.UUID{}, false + } + return openapi_types.UUID(parsed), true +} + +func ptr[T any](v T) *T { + return &v +} + +func strPtr(s string) *string { + if s == "" { + return nil + } + return &s +} diff --git a/observability-logs-opensearch/internal/observer/client.go b/observability-logs-opensearch/internal/observer/client.go new file mode 100644 index 0000000..c40ed6b --- /dev/null +++ b/observability-logs-opensearch/internal/observer/client.go @@ -0,0 +1,79 @@ 
+// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package observer + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + "time" +) + +// Client is an HTTP client for forwarding alerts to the Observer API. +type Client struct { + baseURL string + httpClient *http.Client +} + +// NewClient creates a new Observer client. +func NewClient(baseURL string) *Client { + return &Client{ + baseURL: strings.TrimRight(baseURL, "/"), + httpClient: &http.Client{ + Timeout: 10 * time.Second, + }, + } +} + +type alertWebhookRequest struct { + RuleName string `json:"ruleName"` + RuleNamespace string `json:"ruleNamespace"` + AlertValue float64 `json:"alertValue"` + AlertTimestamp time.Time `json:"alertTimestamp"` +} + +// ForwardAlert sends an alert to the Observer webhook API. +func (c *Client) ForwardAlert( + ctx context.Context, + ruleName string, + ruleNamespace string, + alertValue float64, + alertTimestamp time.Time, +) error { + payload := alertWebhookRequest{ + RuleName: ruleName, + RuleNamespace: ruleNamespace, + AlertValue: alertValue, + AlertTimestamp: alertTimestamp, + } + + body, err := json.Marshal(payload) + if err != nil { + return fmt.Errorf("failed to marshal webhook payload: %w", err) + } + + url := c.baseURL + "/api/v1alpha1/alerts/webhook" + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body)) + if err != nil { + return fmt.Errorf("failed to create request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + + resp, err := c.httpClient.Do(req) + if err != nil { + return fmt.Errorf("failed to call observer webhook endpoint: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode < http.StatusOK || resp.StatusCode >= http.StatusMultipleChoices { + respBody, _ := io.ReadAll(resp.Body) + return fmt.Errorf("observer webhook endpoint returned status %d: %s", resp.StatusCode, strings.TrimSpace(string(respBody))) + } + + return nil +} 
diff --git a/observability-logs-opensearch/internal/opensearch/client.go b/observability-logs-opensearch/internal/opensearch/client.go new file mode 100644 index 0000000..2c41117 --- /dev/null +++ b/observability-logs-opensearch/internal/opensearch/client.go @@ -0,0 +1,357 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package opensearch + +import ( + "bytes" + "context" + "crypto/tls" + "encoding/json" + "fmt" + "io" + "log/slog" + "net/http" + "strings" + + "github.com/opensearch-project/opensearch-go/v4" + "github.com/opensearch-project/opensearch-go/v4/opensearchapi" +) + +const alertsIndexName = "openchoreo-alerts" + +// Client wraps the OpenSearch client with logging. +type Client struct { + client *opensearchapi.Client + logger *slog.Logger + address string + insecureSkipVerify bool +} + +// NewClient creates a new OpenSearch client with the provided configuration. +func NewClient(address, username, password string, insecureSkipVerify bool, logger *slog.Logger) (*Client, error) { + client, err := opensearchapi.NewClient(opensearchapi.Config{ + Client: opensearch.Config{ + Addresses: []string{address}, + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: insecureSkipVerify, //nolint:gosec // G402: Using self-signed cert + }, + }, + Username: username, + Password: password, + }, + }) + if err != nil { + return nil, fmt.Errorf("failed to create OpenSearch client: %w", err) + } + + return &Client{ + client: client, + logger: logger, + address: address, + insecureSkipVerify: insecureSkipVerify, + }, nil +} + +// CheckHealth performs a health check against the OpenSearch cluster. 
+func (c *Client) CheckHealth(ctx context.Context) error { + req, err := http.NewRequestWithContext(ctx, "GET", "/_cluster/health", nil) + if err != nil { + return fmt.Errorf("failed to create health check request: %w", err) + } + + res, err := c.client.Client.Perform(req) + if err != nil { + return fmt.Errorf("health check request failed: %w", err) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + bodyBytes, _ := io.ReadAll(res.Body) + return fmt.Errorf("health check failed with status %d: %s", res.StatusCode, string(bodyBytes)) + } + + var health struct { + Status string `json:"status"` + } + if err := json.NewDecoder(res.Body).Decode(&health); err != nil { + return fmt.Errorf("failed to parse health response: %w", err) + } + + if health.Status != "green" && health.Status != "yellow" { + return fmt.Errorf("cluster health status is %q", health.Status) + } + + c.logger.Info("OpenSearch cluster health check passed", slog.String("status", health.Status)) + return nil +} + +// Search executes a search request against OpenSearch. 
+func (c *Client) Search(ctx context.Context, indices []string, query map[string]interface{}) (*SearchResponse, error) { + c.logger.Debug("Executing search", "indices", indices) + + ignoreUnavailable := true + resp, err := c.client.Search(ctx, &opensearchapi.SearchReq{ + Indices: indices, + Body: buildSearchBody(query), + Params: opensearchapi.SearchParams{ + IgnoreUnavailable: &ignoreUnavailable, + }, + }) + if err != nil { + c.logger.Error("Search request failed", "error", err) + return nil, fmt.Errorf("search request failed: %w", err) + } + + response := &SearchResponse{ + Took: resp.Took, + TimedOut: resp.Timeout, + } + response.Hits.Total.Value = resp.Hits.Total.Value + response.Hits.Total.Relation = resp.Hits.Total.Relation + + for _, h := range resp.Hits.Hits { + var source map[string]interface{} + if err := json.Unmarshal(h.Source, &source); err != nil { + c.logger.Warn("Failed to unmarshal hit source", "hit_id", h.ID, "error", err) + continue + } + hit := Hit{ + ID: h.ID, + Source: source, + } + score := float64(h.Score) + hit.Score = &score + response.Hits.Hits = append(response.Hits.Hits, hit) + } + + c.logger.Debug("Search completed", + "total_hits", response.Hits.Total.Value, + "returned_hits", len(response.Hits.Hits)) + + return response, nil +} + +// SearchMonitorByName searches alerting monitors by name using the Alerting plugin API. 
+func (c *Client) SearchMonitorByName(ctx context.Context, name string) (string, bool, error) { + path := "/_plugins/_alerting/monitors/_search" + nameJSON, err := json.Marshal(name) + if err != nil { + return "", false, fmt.Errorf("failed to marshal monitor name: %w", err) + } + queryBody := fmt.Sprintf(`{ + "query": { + "match_phrase": { + "monitor.name": %s + } + } + }`, string(nameJSON)) + + req, err := http.NewRequestWithContext(ctx, "POST", path, strings.NewReader(queryBody)) + if err != nil { + return "", false, fmt.Errorf("failed to create request: %w", err) + } + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Client.Perform(req) + if err != nil { + return "", false, fmt.Errorf("monitor search request failed: %w", err) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return "", false, fmt.Errorf("monitor search request failed with status: %d", res.StatusCode) + } + + parsed, err := parseSearchResponse(res.Body) + if err != nil { + return "", false, fmt.Errorf("failed to parse monitor search response: %w", err) + } + + if parsed.Hits.Total.Value == 0 || len(parsed.Hits.Hits) == 0 { + return "", false, nil + } + if parsed.Hits.Hits[0].ID == "" { + return "", false, fmt.Errorf("monitor search response missing _id field") + } + return parsed.Hits.Hits[0].ID, true, nil +} + +// CreateMonitor creates a new alerting monitor using the Alerting plugin API. 
+func (c *Client) CreateMonitor(ctx context.Context, monitor map[string]interface{}) (string, int64, error) { + body, err := json.Marshal(monitor) + if err != nil { + return "", 0, fmt.Errorf("failed to marshal monitor: %w", err) + } + c.logger.Debug("Creating monitor", "body", string(body)) + + path := "/_plugins/_alerting/monitors" + req, err := http.NewRequest("POST", path, bytes.NewReader(body)) + if err != nil { + return "", 0, fmt.Errorf("failed to create request: %w", err) + } + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Client.Perform(req) + if err != nil { + return "", 0, fmt.Errorf("monitor create request failed: %w", err) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusCreated { + bodyBytes, _ := io.ReadAll(res.Body) + c.logger.Error("Monitor create failed", + "status", res.StatusCode, + "response", string(bodyBytes)) + return "", 0, fmt.Errorf("monitor create request failed with status: %d, response: %s", res.StatusCode, string(bodyBytes)) + } + + type MonitorUpsertResponse struct { + LastUpdateTime int64 `json:"last_update_time"` + } + var parsed struct { + ID string `json:"_id"` + Monitor MonitorUpsertResponse `json:"monitor"` + } + if err := json.NewDecoder(res.Body).Decode(&parsed); err != nil { + return "", 0, fmt.Errorf("failed to parse monitor create response: %w", err) + } + + c.logger.Debug("Monitor created", slog.String("id", parsed.ID), slog.Int64("last_update_time", parsed.Monitor.LastUpdateTime)) + return parsed.ID, parsed.Monitor.LastUpdateTime, nil +} + +// GetMonitorByID retrieves an alerting monitor by ID using the Alerting plugin API. 
+func (c *Client) GetMonitorByID(ctx context.Context, monitorID string) (map[string]interface{}, error) { + path := fmt.Sprintf("/_plugins/_alerting/monitors/%s", monitorID) + req, err := http.NewRequestWithContext(ctx, "GET", path, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Client.Perform(req) + if err != nil { + return nil, fmt.Errorf("monitor get request failed: %w", err) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + bodyBytes, _ := io.ReadAll(res.Body) + c.logger.Error("Monitor get failed", + "status", res.StatusCode, + "monitor_id", monitorID, + "response", string(bodyBytes)) + return nil, fmt.Errorf("monitor get request failed with status: %d, response: %s", res.StatusCode, string(bodyBytes)) + } + + var response map[string]interface{} + if err := json.NewDecoder(res.Body).Decode(&response); err != nil { + return nil, fmt.Errorf("failed to parse monitor get response: %w", err) + } + + if monitor, ok := response["monitor"].(map[string]interface{}); ok { + return monitor, nil + } + + return nil, fmt.Errorf("monitor object not found in response") +} + +// UpdateMonitor updates an existing alerting monitor using the Alerting plugin API. 
+func (c *Client) UpdateMonitor(ctx context.Context, monitorID string, monitor map[string]interface{}) (int64, error) { + body, err := json.Marshal(monitor) + if err != nil { + return 0, fmt.Errorf("failed to marshal monitor: %w", err) + } + c.logger.Debug("Updating monitor", "monitor_id", monitorID, "body", string(body)) + + path := fmt.Sprintf("/_plugins/_alerting/monitors/%s", monitorID) + req, err := http.NewRequestWithContext(ctx, "PUT", path, bytes.NewReader(body)) + if err != nil { + return 0, fmt.Errorf("failed to create request: %w", err) + } + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Client.Perform(req) + if err != nil { + return 0, fmt.Errorf("monitor update request failed: %w", err) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + bodyBytes, _ := io.ReadAll(res.Body) + c.logger.Error("Monitor update failed", + "status", res.StatusCode, + "monitor_id", monitorID, + "response", string(bodyBytes)) + return 0, fmt.Errorf("monitor update request failed with status: %d, response: %s", res.StatusCode, string(bodyBytes)) + } + + type MonitorUpsertResponse struct { + LastUpdateTime int64 `json:"last_update_time"` + } + var parsed struct { + ID string `json:"_id"` + Monitor MonitorUpsertResponse `json:"monitor"` + } + if err := json.NewDecoder(res.Body).Decode(&parsed); err != nil { + return 0, fmt.Errorf("failed to parse monitor update response: %w", err) + } + + c.logger.Debug("Monitor updated successfully", + "monitor_id", monitorID, + "last_update_time", parsed.Monitor.LastUpdateTime) + return parsed.Monitor.LastUpdateTime, nil +} + +// DeleteMonitor deletes an alerting monitor using the Alerting plugin API. 
+func (c *Client) DeleteMonitor(ctx context.Context, monitorID string) error { + path := fmt.Sprintf("/_plugins/_alerting/monitors/%s", monitorID) + req, err := http.NewRequestWithContext(ctx, "DELETE", path, nil) + if err != nil { + return fmt.Errorf("failed to create request: %w", err) + } + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Client.Perform(req) + if err != nil { + return fmt.Errorf("monitor delete request failed: %w", err) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusNoContent { + bodyBytes, _ := io.ReadAll(res.Body) + c.logger.Error("Monitor delete failed", + "status", res.StatusCode, + "monitor_id", monitorID, + "response", string(bodyBytes)) + return fmt.Errorf("monitor delete request failed with status: %d, response: %s", res.StatusCode, string(bodyBytes)) + } + + c.logger.Debug("Monitor deleted successfully", "monitor_id", monitorID) + return nil +} + +// WriteAlertEntry writes an alert entry to the openchoreo-alerts index. 
+func (c *Client) WriteAlertEntry(ctx context.Context, entry map[string]interface{}) (string, error) { + body, err := json.Marshal(entry) + if err != nil { + return "", fmt.Errorf("failed to marshal alert entry: %w", err) + } + + resp, err := c.client.Index(ctx, opensearchapi.IndexReq{ + Index: alertsIndexName, + Body: bytes.NewReader(body), + Params: opensearchapi.IndexParams{ + Refresh: "true", + }, + }) + if err != nil { + c.logger.Error("Alert index request failed", "error", err) + return "", fmt.Errorf("alert index request failed: %w", err) + } + + c.logger.Debug("Alert entry written", "alert_id", resp.ID) + return resp.ID, nil +} diff --git a/observability-logs-opensearch/internal/opensearch/labels.go b/observability-logs-opensearch/internal/opensearch/labels.go new file mode 100644 index 0000000..c1b78cd --- /dev/null +++ b/observability-logs-opensearch/internal/opensearch/labels.go @@ -0,0 +1,62 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package opensearch + +import "strings" + +// ReplaceDots replaces dots with underscores in a string. +// This is used to match Fluent-Bit's Replace_Dots behavior in the OpenSearch output plugin. +func ReplaceDots(s string) string { + return strings.ReplaceAll(s, ".", "_") +} + +// Kubernetes label keys used for log filtering and identification. +const ( + ComponentID = "openchoreo.dev/component-uid" + EnvironmentID = "openchoreo.dev/environment-uid" + ProjectID = "openchoreo.dev/project-uid" + ComponentName = "openchoreo.dev/component" + EnvironmentName = "openchoreo.dev/environment" + ProjectName = "openchoreo.dev/project" + Version = "version" + VersionID = "version_id" + NamespaceName = "openchoreo.dev/namespace" + BuildID = "build-name" + BuildUUID = "uuid" + Target = "target" +) + +// Target value constants for different log types. +const ( + TargetBuild = "build" + TargetRuntime = "runtime" +) + +// Query parameter constants for log types. 
const (
	QueryParamLogTypeBuild   = "BUILD"
	QueryParamLogTypeRuntime = "RUNTIME"
)

// OpenSearch field paths for querying Kubernetes labels in log documents.
const (
	KubernetesPrefix        = "kubernetes"
	KubernetesLabelsPrefix  = KubernetesPrefix + ".labels"
	KubernetesPodName       = KubernetesPrefix + ".pod_name"
	KubernetesContainerName = KubernetesPrefix + ".container_name"
	KubernetesNamespaceName = KubernetesPrefix + ".namespace_name"
)

// OpenSearch field paths with dots replaced by underscores in label keys.
// Computed once at package initialization via ReplaceDots, mirroring how
// the ingestion pipeline rewrites label keys before indexing.
var (
	OSComponentID   = KubernetesLabelsPrefix + "." + ReplaceDots(ComponentID)
	OSEnvironmentID = KubernetesLabelsPrefix + "." + ReplaceDots(EnvironmentID)
	OSProjectID     = KubernetesLabelsPrefix + "." + ReplaceDots(ProjectID)
	OSVersion       = KubernetesLabelsPrefix + "." + ReplaceDots(Version)
	OSVersionID     = KubernetesLabelsPrefix + "." + ReplaceDots(VersionID)
	OSNamespaceName = KubernetesLabelsPrefix + "." + ReplaceDots(NamespaceName)
	OSBuildID       = KubernetesLabelsPrefix + "." + ReplaceDots(BuildID)
	OSBuildUUID     = KubernetesLabelsPrefix + "." + ReplaceDots(BuildUUID)
	OSTarget        = KubernetesLabelsPrefix + "." + ReplaceDots(Target)
)
diff --git a/observability-logs-opensearch/internal/opensearch/queries.go b/observability-logs-opensearch/internal/opensearch/queries.go
new file mode 100644
index 0000000..67ce2b6
--- /dev/null
+++ b/observability-logs-opensearch/internal/opensearch/queries.go
@@ -0,0 +1,491 @@
// Copyright 2026 The OpenChoreo Authors
// SPDX-License-Identifier: Apache-2.0

package opensearch

import (
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
	"time"
)

// sanitizeWildcardValue escapes OpenSearch wildcard metacharacters from user-provided values
// to prevent wildcard injection attacks. Escaped characters: \, ", *, ?
// wildcardEscaper rewrites every wildcard metacharacter to its escaped form
// in a single pass (equivalent to sequential ReplaceAll calls for this
// character set, since no replacement output re-triggers another rule).
var wildcardEscaper = strings.NewReplacer(
	`\`, `\\`,
	`"`, `\"`,
	`*`, `\*`,
	`?`, `\?`,
)

func sanitizeWildcardValue(s string) string {
	return wildcardEscaper.Replace(s)
}

// QueryBuilder provides methods to build OpenSearch queries.
type QueryBuilder struct {
	indexPrefix string
}

// NewQueryBuilder creates a new query builder with the given index prefix.
func NewQueryBuilder(indexPrefix string) *QueryBuilder {
	return &QueryBuilder{indexPrefix: indexPrefix}
}

// formatDurationForOpenSearch normalizes durations so OpenSearch monitors accept them.
// Only whole hours and whole minutes are representable.
func formatDurationForOpenSearch(d string) (string, error) {
	parsed, err := time.ParseDuration(d)
	if err != nil {
		return "", err
	}

	if parsed%time.Hour == 0 {
		return fmt.Sprintf("%dh", parsed/time.Hour), nil
	}
	if parsed%time.Minute == 0 {
		return fmt.Sprintf("%dm", parsed/time.Minute), nil
	}
	return "", fmt.Errorf("duration must be a whole number of minutes or hours; seconds are not supported: %s", d)
}

// addTimeRangeFilter adds time range filter to must conditions.
// Both bounds must be present; otherwise the conditions are returned unchanged.
func addTimeRangeFilter(mustConditions []map[string]interface{}, startTime, endTime string) []map[string]interface{} {
	if startTime == "" || endTime == "" {
		return mustConditions
	}
	return append(mustConditions, map[string]interface{}{
		"range": map[string]interface{}{
			"@timestamp": map[string]interface{}{
				"gt": startTime,
				"lt": endTime,
			},
		},
	})
}

// addSearchPhraseFilter adds wildcard search phrase filter to must conditions.
+func addSearchPhraseFilter(mustConditions []map[string]interface{}, searchPhrase string) []map[string]interface{} { + if searchPhrase != "" { + searchFilter := map[string]interface{}{ + "wildcard": map[string]interface{}{ + "log": "*" + sanitizeWildcardValue(searchPhrase) + "*", + }, + } + mustConditions = append(mustConditions, searchFilter) + } + return mustConditions +} + +// addLogLevelFilter adds log level filter to must conditions. +func addLogLevelFilter(mustConditions []map[string]interface{}, logLevels []string) []map[string]interface{} { + if len(logLevels) > 0 { + shouldConditions := make([]map[string]interface{}, 0, len(logLevels)) + + for _, logLevel := range logLevels { + shouldConditions = append(shouldConditions, map[string]interface{}{ + "wildcard": map[string]interface{}{ + "log": map[string]interface{}{ + "value": "*" + sanitizeWildcardValue(strings.ToUpper(logLevel)) + "*", + "case_insensitive": true, + }, + }, + }) + } + + if len(shouldConditions) > 0 { + logLevelFilter := map[string]interface{}{ + "bool": map[string]interface{}{ + "should": shouldConditions, + "minimum_should_match": 1, + }, + } + mustConditions = append(mustConditions, logLevelFilter) + } + } + return mustConditions +} + +// BuildComponentLogsQueryV1 builds a query for the API component logs endpoint. 
+func (qb *QueryBuilder) BuildComponentLogsQueryV1(params ComponentLogsQueryParamsV1) (map[string]interface{}, error) { + if params.StartTime == "" || params.EndTime == "" || params.NamespaceName == "" { + return nil, fmt.Errorf("start time, end time, and namespace name are required") + } + mustConditions := []map[string]interface{}{} + + mustConditions = addTimeRangeFilter(mustConditions, params.StartTime, params.EndTime) + + namespaceFilter := map[string]interface{}{ + "term": map[string]interface{}{ + OSNamespaceName: params.NamespaceName, + }, + } + mustConditions = append(mustConditions, namespaceFilter) + + if params.ProjectID != "" { + projectFilter := map[string]interface{}{ + "term": map[string]interface{}{ + OSProjectID: params.ProjectID, + }, + } + mustConditions = append(mustConditions, projectFilter) + } + + if params.ComponentID != "" { + componentFilter := map[string]interface{}{ + "term": map[string]interface{}{ + OSComponentID: params.ComponentID, + }, + } + mustConditions = append(mustConditions, componentFilter) + } + + if params.EnvironmentID != "" { + environmentFilter := map[string]interface{}{ + "term": map[string]interface{}{ + OSEnvironmentID: params.EnvironmentID, + }, + } + mustConditions = append(mustConditions, environmentFilter) + } + + mustConditions = addSearchPhraseFilter(mustConditions, params.SearchPhrase) + mustConditions = addLogLevelFilter(mustConditions, params.LogLevels) + + limit := params.Limit + if limit <= 0 { + limit = 100 + } + + sortOrder := params.SortOrder + if sortOrder == "" { + sortOrder = "desc" + } + + query := map[string]interface{}{ + "size": limit, + "query": map[string]interface{}{ + "bool": map[string]interface{}{ + "must": mustConditions, + }, + }, + "sort": []map[string]interface{}{ + { + "@timestamp": map[string]interface{}{ + "order": sortOrder, + }, + }, + }, + } + + return query, nil +} + +// BuildWorkflowRunLogsQuery builds a query for workflow run logs with wildcard search. 
+func (qb *QueryBuilder) BuildWorkflowRunLogsQuery(params WorkflowRunQueryParams) map[string]interface{} { + sanitizedWorkflowRunID := sanitizeWildcardValue(params.WorkflowRunID) + podNamePattern := sanitizedWorkflowRunID + "*" + + mustConditions := []map[string]interface{}{ + { + "wildcard": map[string]interface{}{ + KubernetesPodName: podNamePattern, + }, + }, + } + if params.StepName != "" { + const kubeAnnotationsPrefix = "kubernetes.annotations." + const argoNodeNameAnnotation = "workflows_argoproj_io/node-name" + stepNameFilter := map[string]interface{}{ + "wildcard": map[string]interface{}{ + kubeAnnotationsPrefix + argoNodeNameAnnotation: "*" + sanitizeWildcardValue(params.StepName) + "*", + }, + } + mustConditions = append(mustConditions, stepNameFilter) + } + mustConditions = addTimeRangeFilter(mustConditions, params.QueryParams.StartTime, params.QueryParams.EndTime) + + if params.QueryParams.NamespaceName != "" { + k8sNamespace := fmt.Sprintf("workflows-%s", params.QueryParams.NamespaceName) + namespaceFilter := map[string]interface{}{ + "term": map[string]interface{}{ + KubernetesNamespaceName: k8sNamespace, + }, + } + mustConditions = append(mustConditions, namespaceFilter) + } + + mustNotConditions := []map[string]interface{}{ + { + "term": map[string]interface{}{ + KubernetesContainerName: "init", + }, + }, + { + "term": map[string]interface{}{ + KubernetesContainerName: "wait", + }, + }, + } + + query := map[string]interface{}{ + "size": params.QueryParams.Limit, + "query": map[string]interface{}{ + "bool": map[string]interface{}{ + "must": mustConditions, + "must_not": mustNotConditions, + }, + }, + "sort": []map[string]interface{}{ + { + "@timestamp": map[string]interface{}{ + "order": params.QueryParams.SortOrder, + }, + }, + }, + } + return query +} + +// GenerateIndices generates the list of indices to search based on time range. 
+func (qb *QueryBuilder) GenerateIndices(startTime, endTime string) ([]string, error) { + if startTime == "" || endTime == "" { + return []string{qb.indexPrefix + "*"}, nil + } + + start, err := time.Parse(time.RFC3339, startTime) + if err != nil { + return nil, fmt.Errorf("invalid start time format: %w", err) + } + + end, err := time.Parse(time.RFC3339, endTime) + if err != nil { + return nil, fmt.Errorf("invalid end time format: %w", err) + } + + indices := []string{} + current := start + + for current.Before(end) || current.Equal(end) { + indexName := qb.indexPrefix + current.Format("2006-01-02") + indices = append(indices, indexName) + current = current.AddDate(0, 0, 1) + } + + endIndexName := qb.indexPrefix + end.Format("2006-01-02") + if !contains(indices, endIndexName) { + indices = append(indices, endIndexName) + } + + return indices, nil +} + +func contains(slice []string, item string) bool { + for _, s := range slice { + if s == item { + return true + } + } + return false +} + +// BuildLogAlertingRuleQuery builds the query for a log alerting rule monitor. 
+func (qb *QueryBuilder) BuildLogAlertingRuleQuery(params AlertingRuleRequest) (map[string]interface{}, error) { + window, err := formatDurationForOpenSearch(params.Condition.Window) + if err != nil { + return nil, fmt.Errorf("failed to format window duration: %w", err) + } + filterConditions := []map[string]interface{}{ + { + "range": map[string]interface{}{ + "@timestamp": map[string]interface{}{ + "from": "{{period_end}}||-" + window, + "to": "{{period_end}}", + "format": "epoch_millis", + "include_lower": true, + "include_upper": true, + "boost": 1, + }, + }, + }, + { + "term": map[string]interface{}{ + OSComponentID: map[string]interface{}{ + "value": params.Metadata.ComponentUID, + "boost": 1, + }, + }, + }, + { + "term": map[string]interface{}{ + OSEnvironmentID: map[string]interface{}{ + "value": params.Metadata.EnvironmentUID, + "boost": 1, + }, + }, + }, + { + "term": map[string]interface{}{ + OSProjectID: map[string]interface{}{ + "value": params.Metadata.ProjectUID, + "boost": 1, + }, + }, + }, + { + "wildcard": map[string]interface{}{ + "log": map[string]interface{}{ + "wildcard": "*" + sanitizeWildcardValue(params.Source.Query) + "*", + "boost": 1, + }, + }, + }, + } + + query := map[string]interface{}{ + "size": 0, + "query": map[string]interface{}{ + "bool": map[string]interface{}{ + "filter": filterConditions, + "adjust_pure_negative": true, + "boost": 1, + }, + }, + } + return query, nil +} + +// BuildLogAlertingRuleMonitorBody builds the full monitor body for an alerting rule. 
+func (qb *QueryBuilder) BuildLogAlertingRuleMonitorBody(params AlertingRuleRequest) (map[string]interface{}, error) { + intervalDuration, err := time.ParseDuration(params.Condition.Interval) + if err != nil { + return nil, fmt.Errorf("invalid interval format: %w", err) + } + + query, err := qb.BuildLogAlertingRuleQuery(params) + if err != nil { + return nil, fmt.Errorf("failed to build log alerting rule query: %w", err) + } + + monitorBody := MonitorBody{ + Type: "monitor", + MonitorType: "query_level_monitor", + Name: params.Metadata.Name, + Enabled: params.Condition.Enabled, + Schedule: MonitorSchedule{ + Period: MonitorSchedulePeriod{ + Interval: intervalDuration.Minutes(), + Unit: "MINUTES", + }, + }, + Inputs: []MonitorInput{ + { + Search: MonitorInputSearch{ + Indices: []string{qb.indexPrefix + "*"}, + Query: query, + }, + }, + }, + Triggers: []MonitorTrigger{ + { + QueryLevelTrigger: &MonitorTriggerQueryLevelTrigger{ + Name: "trigger-" + params.Metadata.Name, + Severity: "1", + Condition: MonitorTriggerCondition{ + Script: MonitorTriggerConditionScript{ + Source: fmt.Sprintf("ctx.results[0].hits.total.value %s %s", GetOperatorSymbol(params.Condition.Operator), strconv.FormatFloat(params.Condition.Threshold, 'f', -1, 64)), + Lang: "painless", + }, + }, + Actions: []MonitorTriggerAction{ + { + Name: "action-" + params.Metadata.Name, + DestinationID: "openchoreo-observer-alerting-webhook", + MessageTemplate: MonitorMessageTemplate{ + Source: buildWebhookMessageTemplate(params), + Lang: "mustache", + }, + ThrottleEnabled: true, + Throttle: MonitorTriggerActionThrottle{ + Value: 60, + Unit: "MINUTES", + }, + SubjectTemplate: MonitorMessageTemplate{ + Source: "TheSubject", + Lang: "mustache", + }, + ActionExecutionPolicy: MonitorTriggerActionExecutionPolicy{ + ActionExecutionScope: MonitorTriggerActionExecutionScope{ + PerAlert: MonitorActionExecutionScopePerAlert{ + ActionableAlerts: []string{"DEDUPED", "NEW"}, + }, + }, + }, + }, + }, + }, + }, + }, + } + + 
bodyBytes, err := json.Marshal(monitorBody) + if err != nil { + return nil, fmt.Errorf("failed to marshal monitor body: %w", err) + } + + var result map[string]interface{} + if err := json.Unmarshal(bodyBytes, &result); err != nil { + return nil, fmt.Errorf("failed to unmarshal monitor body: %w", err) + } + + return result, nil +} + +// GetOperatorSymbol converts an operator string to its symbol. +func GetOperatorSymbol(operator string) string { + switch operator { + case "gt": + return ">" + case "gte": + return ">=" + case "lt": + return "<" + case "lte": + return "<=" + } + return "" +} + +// ReverseMapOperator converts an operator symbol back to its string name. +func ReverseMapOperator(operator string) string { + switch operator { + case ">": + return "gt" + case ">=": + return "gte" + case "<": + return "lt" + case "<=": + return "lte" + } + return "" +} + +// buildWebhookMessageTemplate builds a JSON message template for webhook notifications. +func buildWebhookMessageTemplate(params AlertingRuleRequest) string { + ruleName, _ := json.Marshal(params.Metadata.Name) + ruleNamespace, _ := json.Marshal(params.Metadata.Namespace) + componentUID, _ := json.Marshal(params.Metadata.ComponentUID) + projectUID, _ := json.Marshal(params.Metadata.ProjectUID) + environmentUID, _ := json.Marshal(params.Metadata.EnvironmentUID) + + return fmt.Sprintf( + `{"ruleName":%s,"ruleNamespace":%s,"componentUid":%s,"projectUid":%s,"environmentUid":%s,"alertValue":{{ctx.results.0.hits.total.value}},"alertTimestamp":"{{ctx.periodStart}}"}`, + string(ruleName), + string(ruleNamespace), + string(componentUID), + string(projectUID), + string(environmentUID), + ) +} diff --git a/observability-logs-opensearch/internal/opensearch/types.go b/observability-logs-opensearch/internal/opensearch/types.go new file mode 100644 index 0000000..06ec1ac --- /dev/null +++ b/observability-logs-opensearch/internal/opensearch/types.go @@ -0,0 +1,309 @@ +// Copyright 2026 The OpenChoreo Authors +// 
SPDX-License-Identifier: Apache-2.0

package opensearch

import (
	"encoding/json"
	"io"
	"strings"
	"time"
)

// SearchResponse represents the response from an OpenSearch search query.
// Only the fields this adapter reads are modeled here.
type SearchResponse struct {
	Hits struct {
		Total struct {
			Value    int    `json:"value"`
			Relation string `json:"relation"`
		} `json:"total"`
		Hits []Hit `json:"hits"`
	} `json:"hits"`
	Took     int  `json:"took"` // server-side query duration, reported back to callers as tookMs
	TimedOut bool `json:"timed_out"`
}

// Hit represents a single search result hit.
type Hit struct {
	ID     string                 `json:"_id"`
	Source map[string]interface{} `json:"_source"` // raw indexed document
	Score  *float64               `json:"_score"`  // pointer: may be null for filter-only queries
}

// LogEntry represents a parsed log entry from OpenSearch.
// The ID/name fields are promoted from the pod's Kubernetes labels by
// ParseLogEntry; Labels additionally keeps every string-valued label as-is.
type LogEntry struct {
	Timestamp       time.Time         `json:"timestamp"`
	Log             string            `json:"log"`
	LogLevel        string            `json:"logLevel"` // inferred from log content, see extractLogLevel
	ComponentID     string            `json:"componentId"`
	EnvironmentID   string            `json:"environmentId"`
	ProjectID       string            `json:"projectId"`
	Version         string            `json:"version"`
	VersionID       string            `json:"versionId"`
	Namespace       string            `json:"namespace"`
	PodID           string            `json:"podId"`
	ContainerName   string            `json:"containerName"`
	Labels          map[string]string `json:"labels"`
	ComponentName   string            `json:"componentName,omitempty"`
	EnvironmentName string            `json:"environmentName,omitempty"`
	ProjectName     string            `json:"projectName,omitempty"`
	NamespaceName   string            `json:"namespaceName,omitempty"`
	PodNamespace    string            `json:"podNamespace,omitempty"`
	PodName         string            `json:"podName,omitempty"`
}

// WorkflowRunLogEntry represents a log entry for workflow run logs.
type WorkflowRunLogEntry struct {
	Timestamp string `json:"timestamp"`
	Log       string `json:"log"`
}

// QueryParams holds common query parameters.
type QueryParams struct {
	StartTime     string   `json:"startTime"`
	EndTime       string   `json:"endTime"`
	SearchPhrase  string   `json:"searchPhrase"`
	LogLevels     []string `json:"logLevels"`
	Limit         int      `json:"limit"`
	SortOrder     string   `json:"sortOrder"`
	ComponentID   string   `json:"componentId,omitempty"`
	EnvironmentID string   `json:"environmentId,omitempty"`
	ProjectID     string   `json:"projectId,omitempty"`
	NamespaceName string   `json:"namespaceName,omitempty"`
	Namespace     string   `json:"namespace,omitempty"`
}

// ComponentLogsQueryParamsV1 holds query parameters for the API component logs query.
type ComponentLogsQueryParamsV1 struct {
	StartTime     string   `json:"startTime"`
	EndTime       string   `json:"endTime"`
	NamespaceName string   `json:"namespaceName"`
	ProjectID     string   `json:"projectId,omitempty"`
	ComponentID   string   `json:"componentId,omitempty"`
	EnvironmentID string   `json:"environmentId,omitempty"`
	SearchPhrase  string   `json:"searchPhrase,omitempty"`
	LogLevels     []string `json:"logLevels,omitempty"`
	Limit         int      `json:"limit"`
	SortOrder     string   `json:"sortOrder"`
}

// WorkflowRunQueryParams holds workflow run-specific query parameters.
type WorkflowRunQueryParams struct {
	QueryParams
	WorkflowRunID string `json:"workflowRunId"`
	StepName      string `json:"stepName,omitempty"`
}

// AlertingRuleRequest defines the request structure for creating/updating alerting rules.
type AlertingRuleRequest struct {
	Metadata  AlertingRuleMetadata  `json:"metadata"`
	Source    AlertingRuleSource    `json:"source"`
	Condition AlertingRuleCondition `json:"condition"`
}

// AlertingRuleMetadata contains metadata about an alerting rule.
// NOTE(review): JSON tags here are kebab-case ("component-uid") while the
// generated API uses camelCase ("componentUid") — confirm the handler maps
// between the two rather than unmarshaling API payloads into this struct.
type AlertingRuleMetadata struct {
	Name           string `json:"name"`
	Namespace      string `json:"namespace"`
	ComponentUID   string `json:"component-uid"`
	ProjectUID     string `json:"project-uid"`
	EnvironmentUID string `json:"environment-uid"`
}

// AlertingRuleSource defines the source of data for the alerting rule.
type AlertingRuleSource struct {
	Query string `json:"query"` // substring matched against the raw log line
}

// AlertingRuleCondition defines the condition that triggers the alert.
type AlertingRuleCondition struct {
	Enabled   bool    `json:"enabled"`
	Window    string  `json:"window"`   // look-back window, Go duration syntax (e.g. "1h")
	Interval  string  `json:"interval"` // monitor evaluation period, Go duration syntax
	Operator  string  `json:"operator"` // one of gt/gte/lt/lte, see GetOperatorSymbol
	Threshold float64 `json:"threshold"`
}

// MonitorBody represents the structure of an OpenSearch monitor.
type MonitorBody struct {
	Type        string           `json:"type"`
	MonitorType string           `json:"monitor_type"`
	Name        string           `json:"name"`
	Enabled     bool             `json:"enabled"`
	Schedule    MonitorSchedule  `json:"schedule"`
	Inputs      []MonitorInput   `json:"inputs"`
	Triggers    []MonitorTrigger `json:"triggers"`
}

// MonitorSchedule defines the monitoring schedule.
type MonitorSchedule struct {
	Period MonitorSchedulePeriod `json:"period"`
}

// MonitorSchedulePeriod defines the time period for schedule.
// NOTE(review): Interval is a float64; OpenSearch schedule intervals appear to
// be whole numbers of Unit — confirm fractional values are rejected or
// validated upstream before this is serialized.
type MonitorSchedulePeriod struct {
	Interval float64 `json:"interval"`
	Unit     string  `json:"unit"`
}

// MonitorInput defines the search input for the monitor.
type MonitorInput struct {
	Search MonitorInputSearch `json:"search"`
}

// MonitorInputSearch defines the search query and indices.
type MonitorInputSearch struct {
	Indices []string               `json:"indices"`
	Query   map[string]interface{} `json:"query"`
}

// MonitorTrigger defines the conditions and actions for the monitor.
type MonitorTrigger struct {
	QueryLevelTrigger *MonitorTriggerQueryLevelTrigger `json:"query_level_trigger,omitempty"`
}

// MonitorTriggerQueryLevelTrigger defines a query-level trigger.
type MonitorTriggerQueryLevelTrigger struct {
	Name      string                  `json:"name"`
	Severity  string                  `json:"severity"`
	Condition MonitorTriggerCondition `json:"condition"`
	Actions   []MonitorTriggerAction  `json:"actions"`
}

// MonitorTriggerCondition defines the trigger condition.
type MonitorTriggerCondition struct {
	Script MonitorTriggerConditionScript `json:"script"`
}

// MonitorTriggerConditionScript defines the script for evaluation.
type MonitorTriggerConditionScript struct {
	Source string `json:"source"` // painless expression over ctx.results
	Lang   string `json:"lang"`
}

// MonitorTriggerAction defines the action to take when triggered.
type MonitorTriggerAction struct {
	Name                  string                              `json:"name"`
	DestinationID         string                              `json:"destination_id"`
	MessageTemplate       MonitorMessageTemplate              `json:"message_template"`
	ThrottleEnabled       bool                                `json:"throttle_enabled"`
	Throttle              MonitorTriggerActionThrottle        `json:"throttle"`
	SubjectTemplate       MonitorMessageTemplate              `json:"subject_template"`
	ActionExecutionPolicy MonitorTriggerActionExecutionPolicy `json:"action_execution_policy"`
}

// MonitorMessageTemplate defines the message template.
type MonitorMessageTemplate struct {
	Source string `json:"source"` // mustache template body
	Lang   string `json:"lang"`
}

// MonitorTriggerActionThrottle defines the throttle settings.
type MonitorTriggerActionThrottle struct {
	Value int    `json:"value"`
	Unit  string `json:"unit"`
}

// MonitorTriggerActionExecutionPolicy defines when actions should be executed.
type MonitorTriggerActionExecutionPolicy struct {
	ActionExecutionScope MonitorTriggerActionExecutionScope `json:"action_execution_scope"`
}

// MonitorTriggerActionExecutionScope defines the scope of action execution.
type MonitorTriggerActionExecutionScope struct {
	PerAlert MonitorActionExecutionScopePerAlert `json:"per_alert"`
}

// MonitorActionExecutionScopePerAlert defines per-alert action settings.
type MonitorActionExecutionScopePerAlert struct {
	ActionableAlerts []string `json:"actionable_alerts"`
}

// buildSearchBody converts a query map to an io.Reader for the search request.
+func buildSearchBody(query map[string]interface{}) io.Reader { + body, _ := json.Marshal(query) + return strings.NewReader(string(body)) +} + +// parseSearchResponse parses the search response from OpenSearch. +func parseSearchResponse(body io.Reader) (*SearchResponse, error) { + var response SearchResponse + decoder := json.NewDecoder(body) + if err := decoder.Decode(&response); err != nil { + return nil, err + } + return &response, nil +} + +// ParseLogEntry converts a search hit to a LogEntry struct. +func ParseLogEntry(hit Hit) LogEntry { + source := hit.Source + entry := LogEntry{ + Labels: make(map[string]string), + } + + if ts, ok := source["@timestamp"].(string); ok { + if parsed, err := time.Parse(time.RFC3339, ts); err == nil { + entry.Timestamp = parsed + } + } + + if log, ok := source["log"].(string); ok { + entry.Log = log + entry.LogLevel = extractLogLevel(log) + } + + if k8s, ok := source["kubernetes"].(map[string]interface{}); ok { + if labelMap, ok := k8s["labels"].(map[string]interface{}); ok { + entry.ComponentID = getStringValue(labelMap, ReplaceDots(ComponentID)) + entry.EnvironmentID = getStringValue(labelMap, ReplaceDots(EnvironmentID)) + entry.ProjectID = getStringValue(labelMap, ReplaceDots(ProjectID)) + entry.Version = getStringValue(labelMap, ReplaceDots(Version)) + entry.VersionID = getStringValue(labelMap, ReplaceDots(VersionID)) + entry.ComponentName = getStringValue(labelMap, ReplaceDots(ComponentName)) + entry.EnvironmentName = getStringValue(labelMap, ReplaceDots(EnvironmentName)) + entry.ProjectName = getStringValue(labelMap, ReplaceDots(ProjectName)) + entry.NamespaceName = getStringValue(labelMap, ReplaceDots(NamespaceName)) + + for k, v := range labelMap { + if str, ok := v.(string); ok { + entry.Labels[k] = str + } + } + } + + entry.Namespace = getStringValue(k8s, "namespace_name") + entry.PodNamespace = getStringValue(k8s, "namespace_name") + entry.PodID = getStringValue(k8s, "pod_id") + entry.PodName = getStringValue(k8s, 
"pod_name") + entry.ContainerName = getStringValue(k8s, "container_name") + } + + return entry +} + +// getStringValue safely extracts a string value from a map. +func getStringValue(m map[string]interface{}, key string) string { + if val, ok := m[key].(string); ok { + return val + } + return "" +} + +// extractLogLevel extracts log level from log content using common patterns. +func extractLogLevel(log string) string { + log = strings.ToUpper(log) + + logLevels := []string{"ERROR", "FATAL", "SEVERE", "WARN", "WARNING", "INFO", "DEBUG", "UNDEFINED"} + + for _, level := range logLevels { + if strings.Contains(log, level) { + if level == "WARNING" { + return "WARN" + } + return level + } + } + + return "INFO" +} diff --git a/observability-logs-opensearch/internal/server.go b/observability-logs-opensearch/internal/server.go new file mode 100644 index 0000000..7305548 --- /dev/null +++ b/observability-logs-opensearch/internal/server.go @@ -0,0 +1,58 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package app + +import ( + "context" + "fmt" + "log/slog" + "net/http" + "time" + + "github.com/openchoreo/community-modules/observability-logs-opensearch/internal/api/gen" +) + +// Server wraps the HTTP server. +type Server struct { + port string + httpServer *http.Server + logger *slog.Logger +} + +// NewServer creates a new HTTP server with the generated handler. +func NewServer(port string, logsHandler *LogsHandler, logger *slog.Logger) *Server { + strictHandler := gen.NewStrictHandler(logsHandler, nil) + + mux := http.NewServeMux() + handler := gen.HandlerFromMux(strictHandler, mux) + + httpServer := &http.Server{ + Addr: ":" + port, + Handler: handler, + ReadTimeout: 15 * time.Second, + WriteTimeout: 15 * time.Second, + IdleTimeout: 60 * time.Second, + } + + return &Server{ + port: port, + httpServer: httpServer, + logger: logger, + } +} + +// Start begins listening for HTTP requests. 
+func (s *Server) Start() error { + s.logger.Info("Starting server", slog.String("port", s.port)) + if err := s.httpServer.ListenAndServe(); err != nil && err != http.ErrServerClosed { + return fmt.Errorf("failed to start server: %w", err) + } + return nil +} + +// Shutdown gracefully stops the server. +func (s *Server) Shutdown(ctx context.Context) error { + s.logger.Info("Shutting down server") + return s.httpServer.Shutdown(ctx) +} diff --git a/observability-logs-opensearch/main.go b/observability-logs-opensearch/main.go new file mode 100644 index 0000000..1db693e --- /dev/null +++ b/observability-logs-opensearch/main.go @@ -0,0 +1,95 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package main + +import ( + "context" + "log/slog" + "os" + "os/signal" + "syscall" + "time" + + app "github.com/openchoreo/community-modules/observability-logs-opensearch/internal" + "github.com/openchoreo/community-modules/observability-logs-opensearch/internal/observer" + "github.com/openchoreo/community-modules/observability-logs-opensearch/internal/opensearch" +) + +func main() { + cfg, err := app.LoadConfig() + if err != nil { + logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{ + Level: slog.LevelInfo, + })) + logger.Error("Failed to load configuration", slog.Any("error", err)) + os.Exit(1) + } + + logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{ + Level: cfg.LogLevel, + })) + + logger.Info("Configurations loaded from environment variables successfully", + slog.String("logLevel", cfg.LogLevel.String()), + slog.String("openSearchAddress", cfg.OpenSearchAddress), + slog.String("openSearchIndexPrefix", cfg.OpenSearchIndexPrefix), + slog.String("serverPort", cfg.ServerPort), + slog.String("observerURL", cfg.ObserverURL), + ) + + osClient, err := opensearch.NewClient( + cfg.OpenSearchAddress, + cfg.OpenSearchUsername, + cfg.OpenSearchPassword, + cfg.TLSSkipVerify, + logger, + ) + if err != nil { + 
logger.Error("Failed to create OpenSearch client", slog.Any("error", err)) + os.Exit(1) + } + + // Check OpenSearch connectivity when starting the adapter. If the connection fails, + // exit with an error because the adapter cannot function without connecting to + // OpenSearch. + logger.Info("Checking OpenSearch connectivity", slog.String("address", cfg.OpenSearchAddress)) + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + if err := osClient.CheckHealth(ctx); err != nil { + logger.Error("Failed to connect to OpenSearch. Cannot continue without it. Shutting down", + slog.Any("error", err)) + os.Exit(1) + } + + logger.Info("Successfully connected to OpenSearch") + + queryBuilder := opensearch.NewQueryBuilder(cfg.OpenSearchIndexPrefix) + observerClient := observer.NewClient(cfg.ObserverURL) + logsHandler := app.NewLogsHandler(osClient, queryBuilder, observerClient, logger) + srv := app.NewServer(cfg.ServerPort, logsHandler, logger) + + go func() { + if err := srv.Start(); err != nil { + logger.Error("Server error", slog.Any("error", err)) + os.Exit(1) + } + }() + + quit := make(chan os.Signal, 1) + signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) + <-quit + + logger.Info("Shutting down gracefully") + + shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 10*time.Second) + defer shutdownCancel() + + if err := srv.Shutdown(shutdownCtx); err != nil { + logger.Error("Error during shutdown", slog.Any("error", err)) + os.Exit(1) + } + + logger.Info("Server stopped") +} diff --git a/observability-logs-opensearch/module.yaml b/observability-logs-opensearch/module.yaml index f8bb51c..352df70 100644 --- a/observability-logs-opensearch/module.yaml +++ b/observability-logs-opensearch/module.yaml @@ -6,6 +6,9 @@ # corresponding field in helm/values.yaml to update with the published image URI. images: + - name: observability-logs-opensearch-adapter + context: . 
+ dockerfile: Dockerfile - name: observability-logs-opensearch-setup context: init dockerfile: init/Dockerfile From 87235033d0d0ee5af9ccbfe3fbbb6c0e931ddab2 Mon Sep 17 00:00:00 2001 From: Nilushan Costa Date: Fri, 3 Apr 2026 12:16:33 +0530 Subject: [PATCH 2/2] test: add unit tests and address code review comments Signed-off-by: Nilushan Costa --- observability-logs-opensearch/Makefile | 2 +- .../internal/api/gen/server.gen.go | 9 + .../internal/handlers.go | 30 +- .../internal/handlers_test.go | 1168 +++++++++++++++++ .../internal/observer/client_test.go | 91 ++ .../internal/opensearch/client.go | 2 +- .../internal/opensearch/client_test.go | 505 +++++++ .../internal/opensearch/queries.go | 28 +- .../internal/opensearch/queries_test.go | 433 ++++++ .../internal/opensearch/types.go | 2 +- .../internal/opensearch/types_test.go | 201 +++ observability-logs-opensearch/main.go | 19 +- 12 files changed, 2469 insertions(+), 21 deletions(-) create mode 100644 observability-logs-opensearch/internal/handlers_test.go create mode 100644 observability-logs-opensearch/internal/observer/client_test.go create mode 100644 observability-logs-opensearch/internal/opensearch/client_test.go create mode 100644 observability-logs-opensearch/internal/opensearch/queries_test.go create mode 100644 observability-logs-opensearch/internal/opensearch/types_test.go diff --git a/observability-logs-opensearch/Makefile b/observability-logs-opensearch/Makefile index 138ff56..14029cd 100644 --- a/observability-logs-opensearch/Makefile +++ b/observability-logs-opensearch/Makefile @@ -16,4 +16,4 @@ MODULE_NAME := $(notdir $(CURDIR)) unit-test: go test -coverprofile=coverage.out ./... 
- mv coverage.out ../$(MODULE_NAME)-coverage.out + if [ -f coverage.out ]; then mv coverage.out ../$(MODULE_NAME)-coverage.out; fi diff --git a/observability-logs-opensearch/internal/api/gen/server.gen.go b/observability-logs-opensearch/internal/api/gen/server.gen.go index 08e5efa..ee5c909 100644 --- a/observability-logs-opensearch/internal/api/gen/server.gen.go +++ b/observability-logs-opensearch/internal/api/gen/server.gen.go @@ -530,6 +530,15 @@ func (response UpdateAlertRule400JSONResponse) VisitUpdateAlertRuleResponse(w ht return json.NewEncoder(w).Encode(response) } +type UpdateAlertRule404JSONResponse ErrorResponse + +func (response UpdateAlertRule404JSONResponse) VisitUpdateAlertRuleResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(404) + + return json.NewEncoder(w).Encode(response) +} + type UpdateAlertRule500JSONResponse ErrorResponse func (response UpdateAlertRule500JSONResponse) VisitUpdateAlertRuleResponse(w http.ResponseWriter) error { diff --git a/observability-logs-opensearch/internal/handlers.go b/observability-logs-opensearch/internal/handlers.go index bc80abb..97f5d68 100644 --- a/observability-logs-opensearch/internal/handlers.go +++ b/observability-logs-opensearch/internal/handlers.go @@ -171,7 +171,16 @@ func (h *LogsHandler) queryComponentLogs(ctx context.Context, req *gen.LogsQuery TookMs: &took, } logs := gen.LogsQueryResponse_Logs{} - _ = logs.FromLogsQueryResponseLogs0(entries) + if err := logs.FromLogsQueryResponseLogs0(entries); err != nil { + h.logger.Error("Failed to serialize component log entries", + slog.String("function", "QueryLogs"), + slog.Any("error", err), + ) + return gen.QueryLogs500JSONResponse{ + Title: ptr(gen.InternalServerError), + Message: ptr("internal server error"), + }, nil + } resp.Logs = &logs return gen.QueryLogs200JSONResponse(resp), nil @@ -268,7 +277,16 @@ func (h *LogsHandler) queryWorkflowLogs(ctx context.Context, req *gen.LogsQueryR TookMs: &took, 
} logs := gen.LogsQueryResponse_Logs{} - _ = logs.FromLogsQueryResponseLogs1(entries) + if err := logs.FromLogsQueryResponseLogs1(entries); err != nil { + h.logger.Error("Failed to serialize workflow log entries", + slog.String("function", "QueryLogs"), + slog.Any("error", err), + ) + return gen.QueryLogs500JSONResponse{ + Title: ptr(gen.InternalServerError), + Message: ptr("internal server error"), + }, nil + } resp.Logs = &logs return gen.QueryLogs200JSONResponse(resp), nil @@ -335,8 +353,8 @@ func (h *LogsHandler) DeleteAlertRule(ctx context.Context, request gen.DeleteAle }, nil } if !found { - return gen.DeleteAlertRule500JSONResponse{ - Title: ptr(gen.InternalServerError), + return gen.DeleteAlertRule404JSONResponse{ + Title: ptr(gen.NotFound), Message: ptr("alert rule not found"), }, nil } @@ -435,8 +453,8 @@ func (h *LogsHandler) UpdateAlertRule(ctx context.Context, request gen.UpdateAle }, nil } if !found { - return gen.UpdateAlertRule400JSONResponse{ - Title: ptr(gen.BadRequest), + return gen.UpdateAlertRule404JSONResponse{ + Title: ptr(gen.NotFound), Message: ptr("alert rule not found"), }, nil } diff --git a/observability-logs-opensearch/internal/handlers_test.go b/observability-logs-opensearch/internal/handlers_test.go new file mode 100644 index 0000000..9dc0e04 --- /dev/null +++ b/observability-logs-opensearch/internal/handlers_test.go @@ -0,0 +1,1168 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package app + +import ( + "context" + "encoding/json" + "fmt" + "log/slog" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" + + openapi_types "github.com/oapi-codegen/runtime/types" + "github.com/opensearch-project/opensearch-go/v4" + "github.com/opensearch-project/opensearch-go/v4/opensearchapi" + + "github.com/openchoreo/community-modules/observability-logs-opensearch/internal/api/gen" + "github.com/openchoreo/community-modules/observability-logs-opensearch/internal/observer" + osearch 
"github.com/openchoreo/community-modules/observability-logs-opensearch/internal/opensearch" +) + +func testLogger() *slog.Logger { + return slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelError})) +} + +// newTestOSClient creates an opensearch.Client pointing at the given test server URL. +func newTestOSClient(t *testing.T, serverURL string) *osearch.Client { + t.Helper() + client, err := osearch.NewClient(serverURL, "", "", true, testLogger()) + if err != nil { + t.Fatalf("failed to create test opensearch client: %v", err) + } + return client +} + +func TestHealth(t *testing.T) { + handler := NewLogsHandler(nil, nil, nil, testLogger()) + resp, err := handler.Health(context.Background(), gen.HealthRequestObject{}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + healthResp, ok := resp.(gen.Health200JSONResponse) + if !ok { + t.Fatalf("unexpected response type: %T", resp) + } + if healthResp.Status == nil || *healthResp.Status != "healthy" { + t.Errorf("expected status 'healthy', got %v", healthResp.Status) + } +} + +func TestQueryLogs_NilBody(t *testing.T) { + handler := NewLogsHandler(nil, nil, nil, testLogger()) + resp, err := handler.QueryLogs(context.Background(), gen.QueryLogsRequestObject{Body: nil}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.QueryLogs400JSONResponse); !ok { + t.Fatalf("expected 400 response, got %T", resp) + } +} + +func TestQueryLogs_ComponentScope_Success(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + resp := map[string]interface{}{ + "took": 3, + "timed_out": false, + "hits": map[string]interface{}{ + "total": map[string]interface{}{ + "value": 1, + "relation": "eq", + }, + "hits": []map[string]interface{}{ + { + "_id": "hit-1", + "_score": 1.0, + "_source": map[string]interface{}{ + "log": "INFO 
application started", + "@timestamp": "2025-06-15T10:00:00Z", + "kubernetes": map[string]interface{}{ + "namespace_name": "test-ns", + "pod_name": "my-pod", + "container_name": "main", + "labels": map[string]interface{}{}, + }, + }, + }, + }, + }, + } + json.NewEncoder(w).Encode(resp) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + qb := osearch.NewQueryBuilder("logs-") + handler := NewLogsHandler(osClient, qb, nil, testLogger()) + + startTime := time.Date(2025, 6, 15, 0, 0, 0, 0, time.UTC) + endTime := time.Date(2025, 6, 15, 23, 59, 59, 0, time.UTC) + + searchScope := gen.LogsQueryRequest_SearchScope{} + _ = searchScope.FromComponentSearchScope(gen.ComponentSearchScope{ + Namespace: "test-ns", + }) + + body := gen.LogsQueryRequest{ + StartTime: startTime, + EndTime: endTime, + SearchScope: searchScope, + } + + resp, err := handler.QueryLogs(context.Background(), gen.QueryLogsRequestObject{Body: &body}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + queryResp, ok := resp.(gen.QueryLogs200JSONResponse) + if !ok { + t.Fatalf("expected 200 response, got %T", resp) + } + if queryResp.Total == nil || *queryResp.Total != 1 { + t.Errorf("expected total=1, got %v", queryResp.Total) + } +} + +func TestQueryLogs_ComponentScope_EmptyNamespace(t *testing.T) { + handler := NewLogsHandler(nil, nil, nil, testLogger()) + + searchScope := gen.LogsQueryRequest_SearchScope{} + _ = searchScope.FromComponentSearchScope(gen.ComponentSearchScope{ + Namespace: "", + }) + + body := gen.LogsQueryRequest{ + StartTime: time.Now(), + EndTime: time.Now(), + SearchScope: searchScope, + } + + resp, err := handler.QueryLogs(context.Background(), gen.QueryLogsRequestObject{Body: &body}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.QueryLogs400JSONResponse); !ok { + t.Fatalf("expected 400 response, got %T", resp) + } +} + +func TestQueryLogs_ComponentScope_SearchError(t *testing.T) { + server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusInternalServerError) + fmt.Fprint(w, `{"error":"search failed"}`) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + qb := osearch.NewQueryBuilder("logs-") + handler := NewLogsHandler(osClient, qb, nil, testLogger()) + + searchScope := gen.LogsQueryRequest_SearchScope{} + _ = searchScope.FromComponentSearchScope(gen.ComponentSearchScope{ + Namespace: "test-ns", + }) + + body := gen.LogsQueryRequest{ + StartTime: time.Date(2025, 6, 15, 0, 0, 0, 0, time.UTC), + EndTime: time.Date(2025, 6, 15, 23, 59, 59, 0, time.UTC), + SearchScope: searchScope, + } + + resp, err := handler.QueryLogs(context.Background(), gen.QueryLogsRequestObject{Body: &body}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.QueryLogs500JSONResponse); !ok { + t.Fatalf("expected 500 response, got %T", resp) + } +} + +func TestQueryLogs_WorkflowScope_Success(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + resp := map[string]interface{}{ + "took": 2, + "timed_out": false, + "hits": map[string]interface{}{ + "total": map[string]interface{}{ + "value": 1, + "relation": "eq", + }, + "hits": []map[string]interface{}{ + { + "_id": "wf-hit-1", + "_score": 1.0, + "_source": map[string]interface{}{ + "log": "step completed", + "@timestamp": "2025-06-15T10:00:00Z", + }, + }, + }, + }, + } + json.NewEncoder(w).Encode(resp) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + qb := osearch.NewQueryBuilder("logs-") + handler := NewLogsHandler(osClient, qb, nil, testLogger()) + + workflowRunName := "run-123" + searchScope := gen.LogsQueryRequest_SearchScope{} + _ = 
searchScope.FromWorkflowSearchScope(gen.WorkflowSearchScope{ + Namespace: "test-ns", + WorkflowRunName: &workflowRunName, + }) + + body := gen.LogsQueryRequest{ + StartTime: time.Date(2025, 6, 15, 0, 0, 0, 0, time.UTC), + EndTime: time.Date(2025, 6, 15, 23, 59, 59, 0, time.UTC), + SearchScope: searchScope, + } + + resp, err := handler.QueryLogs(context.Background(), gen.QueryLogsRequestObject{Body: &body}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + queryResp, ok := resp.(gen.QueryLogs200JSONResponse) + if !ok { + t.Fatalf("expected 200 response, got %T", resp) + } + if queryResp.Total == nil || *queryResp.Total != 1 { + t.Errorf("expected total=1, got %v", queryResp.Total) + } +} + +func TestQueryLogs_WorkflowScope_EmptyNamespace(t *testing.T) { + handler := NewLogsHandler(nil, nil, nil, testLogger()) + + workflowRunName := "run-123" + searchScope := gen.LogsQueryRequest_SearchScope{} + _ = searchScope.FromWorkflowSearchScope(gen.WorkflowSearchScope{ + Namespace: "", + WorkflowRunName: &workflowRunName, + }) + + body := gen.LogsQueryRequest{ + StartTime: time.Now(), + EndTime: time.Now(), + SearchScope: searchScope, + } + + resp, err := handler.QueryLogs(context.Background(), gen.QueryLogsRequestObject{Body: &body}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.QueryLogs400JSONResponse); !ok { + t.Fatalf("expected 400 response, got %T", resp) + } +} + +func TestCreateAlertRule_NilBody(t *testing.T) { + handler := NewLogsHandler(nil, nil, nil, testLogger()) + resp, err := handler.CreateAlertRule(context.Background(), gen.CreateAlertRuleRequestObject{Body: nil}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.CreateAlertRule400JSONResponse); !ok { + t.Fatalf("expected 400 response, got %T", resp) + } +} + +func TestCreateAlertRule_Success(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == 
"/_plugins/_alerting/monitors" && r.Method == "POST" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + fmt.Fprint(w, `{"_id": "b2c3d4e5-f6a7-8901-bcde-f12345678901", "monitor": {"last_update_time": 1718444400000}}`) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + qb := osearch.NewQueryBuilder("logs-") + handler := NewLogsHandler(osClient, qb, nil, testLogger()) + + body := gen.AlertRuleRequest{ + Metadata: struct { + ComponentUid openapi_types.UUID `json:"componentUid"` + EnvironmentUid openapi_types.UUID `json:"environmentUid"` + Name string `json:"name"` + Namespace string `json:"namespace"` + ProjectUid openapi_types.UUID `json:"projectUid"` + }{ + Name: "test-rule", + Namespace: "test-ns", + }, + Source: struct { + Query string `json:"query"` + }{ + Query: "error", + }, + Condition: struct { + Enabled bool `json:"enabled"` + Interval string `json:"interval"` + Operator gen.AlertRuleRequestConditionOperator `json:"operator"` + Threshold float32 `json:"threshold"` + Window string `json:"window"` + }{ + Enabled: true, + Window: "1h", + Interval: "5m", + Operator: gen.AlertRuleRequestConditionOperatorGt, + Threshold: 10, + }, + } + + resp, err := handler.CreateAlertRule(context.Background(), gen.CreateAlertRuleRequestObject{Body: &body}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + createResp, ok := resp.(gen.CreateAlertRule201JSONResponse) + if !ok { + t.Fatalf("expected 201 response, got %T", resp) + } + if createResp.RuleBackendId == nil || *createResp.RuleBackendId != "b2c3d4e5-f6a7-8901-bcde-f12345678901" { + t.Errorf("expected ruleBackendId 'b2c3d4e5-f6a7-8901-bcde-f12345678901', got %v", createResp.RuleBackendId) + } +} + +func TestCreateAlertRule_CreateError(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.Header().Set("Content-Type", "application/json") + 
w.WriteHeader(http.StatusBadRequest) + fmt.Fprint(w, `{"error":"bad request"}`) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + qb := osearch.NewQueryBuilder("logs-") + handler := NewLogsHandler(osClient, qb, nil, testLogger()) + + body := gen.AlertRuleRequest{ + Metadata: struct { + ComponentUid openapi_types.UUID `json:"componentUid"` + EnvironmentUid openapi_types.UUID `json:"environmentUid"` + Name string `json:"name"` + Namespace string `json:"namespace"` + ProjectUid openapi_types.UUID `json:"projectUid"` + }{Name: "test-rule", Namespace: "test-ns"}, + Source: struct { + Query string `json:"query"` + }{Query: "error"}, + Condition: struct { + Enabled bool `json:"enabled"` + Interval string `json:"interval"` + Operator gen.AlertRuleRequestConditionOperator `json:"operator"` + Threshold float32 `json:"threshold"` + Window string `json:"window"` + }{Window: "1h", Interval: "5m", Operator: gen.AlertRuleRequestConditionOperatorGt, Threshold: 10}, + } + + resp, err := handler.CreateAlertRule(context.Background(), gen.CreateAlertRuleRequestObject{Body: &body}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.CreateAlertRule500JSONResponse); !ok { + t.Fatalf("expected 500 response, got %T", resp) + } +} + +// deleteAlertRuleServer returns an httptest.Server that handles monitor search and delete. 
+func deleteAlertRuleServer(monitorFound bool, deleteErr bool) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors/_search" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + if monitorFound { + fmt.Fprint(w, `{"hits":{"total":{"value":1},"hits":[{"_id":"a1b2c3d4-e5f6-7890-abcd-ef1234567890","_source":{}}]}}`) + } else { + fmt.Fprint(w, `{"hits":{"total":{"value":0},"hits":[]}}`) + } + return + } + if strings.HasPrefix(r.URL.Path, "/_plugins/_alerting/monitors/") && r.Method == "DELETE" { + if deleteErr { + w.WriteHeader(http.StatusInternalServerError) + return + } + w.WriteHeader(http.StatusOK) + return + } + w.WriteHeader(http.StatusOK) + })) +} + +func TestDeleteAlertRule_Success(t *testing.T) { + server := deleteAlertRuleServer(true, false) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + handler := NewLogsHandler(osClient, nil, nil, testLogger()) + + resp, err := handler.DeleteAlertRule(context.Background(), gen.DeleteAlertRuleRequestObject{RuleName: "test-rule"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.DeleteAlertRule200JSONResponse); !ok { + t.Fatalf("expected 200 response, got %T", resp) + } +} + +func TestDeleteAlertRule_NotFound(t *testing.T) { + server := deleteAlertRuleServer(false, false) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + handler := NewLogsHandler(osClient, nil, nil, testLogger()) + + resp, err := handler.DeleteAlertRule(context.Background(), gen.DeleteAlertRuleRequestObject{RuleName: "nonexistent"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.DeleteAlertRule404JSONResponse); !ok { + t.Fatalf("expected 404 response for not found, got %T", resp) + } +} + +func TestDeleteAlertRule_SearchError(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + handler := NewLogsHandler(osClient, nil, nil, testLogger()) + + resp, err := handler.DeleteAlertRule(context.Background(), gen.DeleteAlertRuleRequestObject{RuleName: "test-rule"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.DeleteAlertRule500JSONResponse); !ok { + t.Fatalf("expected 500 response, got %T", resp) + } +} + +// getAlertRuleServer returns an httptest.Server for GetAlertRule tests. +func getAlertRuleServer(monitorFound bool, monitorData map[string]interface{}) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors/_search" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + if monitorFound { + fmt.Fprint(w, `{"hits":{"total":{"value":1},"hits":[{"_id":"a1b2c3d4-e5f6-7890-abcd-ef1234567890","_source":{}}]}}`) + } else { + fmt.Fprint(w, `{"hits":{"total":{"value":0},"hits":[]}}`) + } + return + } + if strings.HasPrefix(r.URL.Path, "/_plugins/_alerting/monitors/") && r.Method == "GET" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + resp := map[string]interface{}{"monitor": monitorData} + json.NewEncoder(w).Encode(resp) + return + } + w.WriteHeader(http.StatusOK) + })) +} + +func TestGetAlertRule_Success(t *testing.T) { + monitorData := map[string]interface{}{ + "name": "test-rule", + "enabled": true, + "schedule": map[string]interface{}{ + "period": map[string]interface{}{ + "interval": float64(5), + "unit": "MINUTES", + }, + }, + "inputs": []interface{}{ + map[string]interface{}{ + "search": map[string]interface{}{ + "indices": []interface{}{"logs-*"}, + "query": map[string]interface{}{ + "size": 0, + "query": map[string]interface{}{ + "bool": map[string]interface{}{ + 
"filter": []interface{}{ + map[string]interface{}{ + "range": map[string]interface{}{ + "@timestamp": map[string]interface{}{ + "from": "{{period_end}}||-1h", + "to": "{{period_end}}", + }, + }, + }, + map[string]interface{}{ + "wildcard": map[string]interface{}{ + "log": map[string]interface{}{ + "wildcard": "*error*", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "triggers": []interface{}{ + map[string]interface{}{ + "query_level_trigger": map[string]interface{}{ + "name": "trigger-test-rule", + "severity": "1", + "condition": map[string]interface{}{ + "script": map[string]interface{}{ + "source": "ctx.results[0].hits.total.value > 10", + "lang": "painless", + }, + }, + }, + }, + }, + } + + server := getAlertRuleServer(true, monitorData) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + handler := NewLogsHandler(osClient, nil, nil, testLogger()) + + resp, err := handler.GetAlertRule(context.Background(), gen.GetAlertRuleRequestObject{RuleName: "test-rule"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + getResp, ok := resp.(gen.GetAlertRule200JSONResponse) + if !ok { + t.Fatalf("expected 200 response, got %T", resp) + } + if getResp.Metadata == nil || getResp.Metadata.Name == nil || *getResp.Metadata.Name != "test-rule" { + t.Error("expected metadata name to be 'test-rule'") + } +} + +func TestGetAlertRule_NotFound(t *testing.T) { + server := getAlertRuleServer(false, nil) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + handler := NewLogsHandler(osClient, nil, nil, testLogger()) + + resp, err := handler.GetAlertRule(context.Background(), gen.GetAlertRuleRequestObject{RuleName: "nonexistent"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.GetAlertRule404JSONResponse); !ok { + t.Fatalf("expected 404 response, got %T", resp) + } +} + +func TestGetAlertRule_GetError(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r 
*http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors/_search" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"hits":{"total":{"value":1},"hits":[{"_id":"a1b2c3d4-e5f6-7890-abcd-ef1234567890","_source":{}}]}}`) + return + } + if strings.HasPrefix(r.URL.Path, "/_plugins/_alerting/monitors/") && r.Method == "GET" { + w.WriteHeader(http.StatusInternalServerError) + fmt.Fprint(w, `{"error":"internal error"}`) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + handler := NewLogsHandler(osClient, nil, nil, testLogger()) + + resp, err := handler.GetAlertRule(context.Background(), gen.GetAlertRuleRequestObject{RuleName: "test-rule"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.GetAlertRule500JSONResponse); !ok { + t.Fatalf("expected 500 response, got %T", resp) + } +} + +func TestUpdateAlertRule_NilBody(t *testing.T) { + handler := NewLogsHandler(nil, nil, nil, testLogger()) + resp, err := handler.UpdateAlertRule(context.Background(), gen.UpdateAlertRuleRequestObject{ + RuleName: "test", + Body: nil, + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.UpdateAlertRule400JSONResponse); !ok { + t.Fatalf("expected 400 response, got %T", resp) + } +} + +func TestUpdateAlertRule_Success(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors/_search" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"hits":{"total":{"value":1},"hits":[{"_id":"a1b2c3d4-e5f6-7890-abcd-ef1234567890","_source":{}}]}}`) + return + } + if strings.HasPrefix(r.URL.Path, "/_plugins/_alerting/monitors/") && r.Method == "PUT" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"_id": 
"a1b2c3d4-e5f6-7890-abcd-ef1234567890", "monitor": {"last_update_time": 1718444400001}}`) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + qb := osearch.NewQueryBuilder("logs-") + handler := NewLogsHandler(osClient, qb, nil, testLogger()) + + body := gen.AlertRuleRequest{ + Metadata: struct { + ComponentUid openapi_types.UUID `json:"componentUid"` + EnvironmentUid openapi_types.UUID `json:"environmentUid"` + Name string `json:"name"` + Namespace string `json:"namespace"` + ProjectUid openapi_types.UUID `json:"projectUid"` + }{Name: "test-rule", Namespace: "test-ns"}, + Source: struct { + Query string `json:"query"` + }{Query: "error"}, + Condition: struct { + Enabled bool `json:"enabled"` + Interval string `json:"interval"` + Operator gen.AlertRuleRequestConditionOperator `json:"operator"` + Threshold float32 `json:"threshold"` + Window string `json:"window"` + }{Window: "1h", Interval: "5m", Operator: gen.AlertRuleRequestConditionOperatorGt, Threshold: 10}, + } + + resp, err := handler.UpdateAlertRule(context.Background(), gen.UpdateAlertRuleRequestObject{ + RuleName: "test-rule", + Body: &body, + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.UpdateAlertRule200JSONResponse); !ok { + t.Fatalf("expected 200 response, got %T", resp) + } +} + +func TestUpdateAlertRule_NotFound(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors/_search" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"hits":{"total":{"value":0},"hits":[]}}`) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + osClient := newTestOSClient(t, server.URL) + qb := osearch.NewQueryBuilder("logs-") + handler := NewLogsHandler(osClient, qb, nil, testLogger()) + + body := gen.AlertRuleRequest{ + Metadata: 
struct { + ComponentUid openapi_types.UUID `json:"componentUid"` + EnvironmentUid openapi_types.UUID `json:"environmentUid"` + Name string `json:"name"` + Namespace string `json:"namespace"` + ProjectUid openapi_types.UUID `json:"projectUid"` + }{Name: "test-rule", Namespace: "test-ns"}, + Source: struct { + Query string `json:"query"` + }{Query: "error"}, + Condition: struct { + Enabled bool `json:"enabled"` + Interval string `json:"interval"` + Operator gen.AlertRuleRequestConditionOperator `json:"operator"` + Threshold float32 `json:"threshold"` + Window string `json:"window"` + }{Window: "1h", Interval: "5m", Operator: gen.AlertRuleRequestConditionOperatorGt, Threshold: 10}, + } + + resp, err := handler.UpdateAlertRule(context.Background(), gen.UpdateAlertRuleRequestObject{ + RuleName: "test-rule", + Body: &body, + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.UpdateAlertRule404JSONResponse); !ok { + t.Fatalf("expected 404 response, got %T", resp) + } +} + +func TestHandleAlertWebhook_NilBody(t *testing.T) { + handler := NewLogsHandler(nil, nil, nil, testLogger()) + resp, err := handler.HandleAlertWebhook(context.Background(), gen.HandleAlertWebhookRequestObject{Body: nil}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + webhookResp, ok := resp.(gen.HandleAlertWebhook200JSONResponse) + if !ok { + t.Fatalf("expected 200 response, got %T", resp) + } + if webhookResp.Status == nil || *webhookResp.Status != gen.Success { + t.Error("expected status Success") + } +} + +func TestHandleAlertWebhook_ValidBody(t *testing.T) { + forwardCh := make(chan bool, 1) + observerServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + forwardCh <- true + })) + defer observerServer.Close() + + observerClient := observer.NewClient(observerServer.URL) + handler := NewLogsHandler(nil, nil, observerClient, testLogger()) + + ts := time.Date(2025, 6, 15, 10, 30, 
0, 0, time.UTC) + body := map[string]interface{}{ + "ruleName": "test-rule", + "ruleNamespace": "test-ns", + "alertValue": float64(5), + "alertTimestamp": ts.Format(time.RFC3339), + } + + resp, err := handler.HandleAlertWebhook(context.Background(), gen.HandleAlertWebhookRequestObject{Body: &body}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, ok := resp.(gen.HandleAlertWebhook200JSONResponse); !ok { + t.Fatalf("expected 200 response, got %T", resp) + } + + // Wait for the goroutine to forward the alert + select { + case <-forwardCh: + // success + case <-time.After(5 * time.Second): + t.Error("timed out waiting for alert forwarding") + } +} + +func TestParseAlertWebhookBody(t *testing.T) { + t.Run("valid body", func(t *testing.T) { + ts := time.Date(2025, 6, 15, 10, 30, 0, 0, time.UTC) + body := map[string]interface{}{ + "ruleName": "test-rule", + "ruleNamespace": "test-ns", + "alertValue": float64(5), + "alertTimestamp": ts.Format(time.RFC3339), + } + + ruleName, ruleNamespace, alertValue, alertTimestamp, err := parseAlertWebhookBody(body) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if ruleName != "test-rule" { + t.Errorf("expected ruleName 'test-rule', got %q", ruleName) + } + if ruleNamespace != "test-ns" { + t.Errorf("expected ruleNamespace 'test-ns', got %q", ruleNamespace) + } + if alertValue != 5 { + t.Errorf("expected alertValue 5, got %v", alertValue) + } + if !alertTimestamp.Equal(ts) { + t.Errorf("expected alertTimestamp %v, got %v", ts, alertTimestamp) + } + }) + + t.Run("missing ruleName", func(t *testing.T) { + body := map[string]interface{}{ + "alertValue": float64(1), + } + _, _, _, _, err := parseAlertWebhookBody(body) + if err == nil { + t.Error("expected error for missing ruleName") + } + }) + + t.Run("alertValue as string", func(t *testing.T) { + body := map[string]interface{}{ + "ruleName": "test", + "alertValue": "42.5", + } + _, _, alertValue, _, err := parseAlertWebhookBody(body) + if err != nil { 
+ t.Fatalf("unexpected error: %v", err) + } + if alertValue != 42.5 { + t.Errorf("expected alertValue 42.5, got %v", alertValue) + } + }) + + t.Run("non-RFC3339 timestamp uses current time", func(t *testing.T) { + body := map[string]interface{}{ + "ruleName": "test", + "alertTimestamp": "not-a-timestamp", + } + before := time.Now().Add(-time.Second) + _, _, _, alertTimestamp, err := parseAlertWebhookBody(body) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if alertTimestamp.Before(before) { + t.Errorf("expected alertTimestamp to be recent, got %v", alertTimestamp) + } + }) +} + +func TestParseConditionScript(t *testing.T) { + t.Run("valid script", func(t *testing.T) { + operator, threshold := parseConditionScript("ctx.results[0].hits.total.value > 10") + if operator != ">" { + t.Errorf("expected operator '>', got %q", operator) + } + if threshold != 10 { + t.Errorf("expected threshold 10, got %v", threshold) + } + }) + + t.Run("short input", func(t *testing.T) { + operator, threshold := parseConditionScript("ab") + if operator != "" { + t.Errorf("expected empty operator, got %q", operator) + } + if threshold != 0 { + t.Errorf("expected threshold 0, got %v", threshold) + } + }) +} + +func TestFormatScheduleToInterval(t *testing.T) { + tests := []struct { + interval float64 + unit string + expected string + }{ + {5, "MINUTES", "5m"}, + {2, "HOURS", "2h"}, + {10, "UNKNOWN", "10m"}, + } + + for _, tt := range tests { + t.Run(tt.unit, func(t *testing.T) { + result := formatScheduleToInterval(tt.interval, tt.unit) + if result != tt.expected { + t.Errorf("formatScheduleToInterval(%v, %q) = %q, want %q", tt.interval, tt.unit, result, tt.expected) + } + }) + } +} + +func TestExtractWindowFromQuery(t *testing.T) { + t.Run("valid monitor", func(t *testing.T) { + monitor := map[string]interface{}{ + "inputs": []interface{}{ + map[string]interface{}{ + "search": map[string]interface{}{ + "query": map[string]interface{}{ + "query": map[string]interface{}{ + 
"bool": map[string]interface{}{ + "filter": []interface{}{ + map[string]interface{}{ + "range": map[string]interface{}{ + "@timestamp": map[string]interface{}{ + "from": "{{period_end}}||-1h", + "to": "{{period_end}}", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + window := extractWindowFromQuery(monitor) + if window != "1h" { + t.Errorf("expected window '1h', got %q", window) + } + }) + + t.Run("missing fields", func(t *testing.T) { + monitor := map[string]interface{}{} + window := extractWindowFromQuery(monitor) + if window != "" { + t.Errorf("expected empty window, got %q", window) + } + }) +} + +func TestParseMonitorToAlertRuleResponse(t *testing.T) { + monitor := map[string]interface{}{ + "name": "test-rule", + "enabled": true, + "schedule": map[string]interface{}{ + "period": map[string]interface{}{ + "interval": float64(5), + "unit": "MINUTES", + }, + }, + "inputs": []interface{}{ + map[string]interface{}{ + "search": map[string]interface{}{ + "indices": []interface{}{"logs-*"}, + "query": map[string]interface{}{ + "size": 0, + "query": map[string]interface{}{ + "bool": map[string]interface{}{ + "filter": []interface{}{ + map[string]interface{}{ + "range": map[string]interface{}{ + "@timestamp": map[string]interface{}{ + "from": "{{period_end}}||-1h", + "to": "{{period_end}}", + }, + }, + }, + map[string]interface{}{ + "term": map[string]interface{}{ + osearch.OSComponentID: map[string]interface{}{ + "value": "550e8400-e29b-41d4-a716-446655440000", + }, + }, + }, + map[string]interface{}{ + "wildcard": map[string]interface{}{ + "log": map[string]interface{}{ + "wildcard": "*error*", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "triggers": []interface{}{ + map[string]interface{}{ + "query_level_trigger": map[string]interface{}{ + "name": "trigger-test-rule", + "severity": "1", + "condition": map[string]interface{}{ + "script": map[string]interface{}{ + "source": "ctx.results[0].hits.total.value > 10", + "lang": "painless", + }, + }, + }, + 
}, + }, + } + + resp, err := parseMonitorToAlertRuleResponse(monitor) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if resp.Metadata == nil || resp.Metadata.Name == nil || *resp.Metadata.Name != "test-rule" { + t.Error("expected metadata name 'test-rule'") + } + if resp.Condition == nil || resp.Condition.Enabled == nil || *resp.Condition.Enabled != true { + t.Error("expected condition enabled=true") + } + if resp.Condition.Window == nil || *resp.Condition.Window != "1h" { + t.Errorf("expected window '1h', got %v", resp.Condition.Window) + } + if resp.Condition.Interval == nil || *resp.Condition.Interval != "5m" { + t.Errorf("expected interval '5m', got %v", resp.Condition.Interval) + } + if resp.Source == nil || resp.Source.Query == nil || *resp.Source.Query != "error" { + t.Errorf("expected source query 'error', got %v", resp.Source) + } +} + +func TestToAlertingRuleRequest(t *testing.T) { + body := &gen.AlertRuleRequest{ + Metadata: struct { + ComponentUid openapi_types.UUID `json:"componentUid"` + EnvironmentUid openapi_types.UUID `json:"environmentUid"` + Name string `json:"name"` + Namespace string `json:"namespace"` + ProjectUid openapi_types.UUID `json:"projectUid"` + }{ + Name: "test-rule", + Namespace: "test-ns", + }, + Source: struct { + Query string `json:"query"` + }{ + Query: "error", + }, + Condition: struct { + Enabled bool `json:"enabled"` + Interval string `json:"interval"` + Operator gen.AlertRuleRequestConditionOperator `json:"operator"` + Threshold float32 `json:"threshold"` + Window string `json:"window"` + }{ + Enabled: true, + Window: "1h", + Interval: "5m", + Operator: gen.AlertRuleRequestConditionOperatorGt, + Threshold: 10, + }, + } + + result := toAlertingRuleRequest(body) + if result.Metadata.Name != "test-rule" { + t.Errorf("expected name 'test-rule', got %q", result.Metadata.Name) + } + if result.Metadata.Namespace != "test-ns" { + t.Errorf("expected namespace 'test-ns', got %q", result.Metadata.Namespace) + } + if 
result.Source.Query != "error" { + t.Errorf("expected query 'error', got %q", result.Source.Query) + } + if result.Condition.Operator != "gt" { + t.Errorf("expected operator 'gt', got %q", result.Condition.Operator) + } + if result.Condition.Threshold != 10 { + t.Errorf("expected threshold 10, got %v", result.Condition.Threshold) + } +} + +func TestToComponentLogEntry(t *testing.T) { + t.Run("with UIDs", func(t *testing.T) { + entry := &osearch.LogEntry{ + Timestamp: time.Date(2025, 6, 15, 10, 0, 0, 0, time.UTC), + Log: "test log", + LogLevel: "INFO", + ComponentID: "550e8400-e29b-41d4-a716-446655440000", + EnvironmentID: "550e8400-e29b-41d4-a716-446655440001", + ProjectID: "550e8400-e29b-41d4-a716-446655440002", + ComponentName: "my-comp", + EnvironmentName: "dev", + ProjectName: "my-proj", + NamespaceName: "test-ns", + PodName: "my-pod", + PodNamespace: "ns-1", + ContainerName: "main", + } + + result := toComponentLogEntry(entry) + if result.Log == nil || *result.Log != "test log" { + t.Error("expected log 'test log'") + } + if result.Level == nil || *result.Level != "INFO" { + t.Error("expected level 'INFO'") + } + if result.Metadata == nil { + t.Fatal("expected metadata to be set") + } + if result.Metadata.ComponentUid == nil { + t.Error("expected componentUid to be set") + } + if result.Metadata.ProjectUid == nil { + t.Error("expected projectUid to be set") + } + if result.Metadata.EnvironmentUid == nil { + t.Error("expected environmentUid to be set") + } + }) + + t.Run("without UIDs", func(t *testing.T) { + entry := &osearch.LogEntry{ + Log: "test log", + LogLevel: "ERROR", + } + + result := toComponentLogEntry(entry) + if result.Metadata.ComponentUid != nil { + t.Error("expected componentUid to be nil") + } + if result.Metadata.ProjectUid != nil { + t.Error("expected projectUid to be nil") + } + }) +} + +func TestParseUUID(t *testing.T) { + t.Run("valid", func(t *testing.T) { + uid, ok := parseUUID("550e8400-e29b-41d4-a716-446655440000") + if !ok { + 
t.Fatal("expected successful parse") + } + if uid.String() != "550e8400-e29b-41d4-a716-446655440000" { + t.Errorf("unexpected UUID: %s", uid.String()) + } + }) + + t.Run("invalid", func(t *testing.T) { + _, ok := parseUUID("not-a-uuid") + if ok { + t.Error("expected parse to fail") + } + }) + + t.Run("empty", func(t *testing.T) { + _, ok := parseUUID("") + if ok { + t.Error("expected parse to fail for empty string") + } + }) +} + +func TestStrPtr(t *testing.T) { + result := strPtr("hello") + if result == nil || *result != "hello" { + t.Error("expected non-nil pointer to 'hello'") + } + + result = strPtr("") + if result != nil { + t.Error("expected nil for empty string") + } +} + +func TestPtr(t *testing.T) { + intVal := ptr(42) + if intVal == nil || *intVal != 42 { + t.Error("expected pointer to 42") + } + + strVal := ptr("test") + if strVal == nil || *strVal != "test" { + t.Error("expected pointer to 'test'") + } +} + +// Compile-time references that keep the opensearch client imports in use. 
+var _ = opensearchapi.Config{} +var _ = opensearch.Config{} diff --git a/observability-logs-opensearch/internal/observer/client_test.go b/observability-logs-opensearch/internal/observer/client_test.go new file mode 100644 index 0000000..2348112 --- /dev/null +++ b/observability-logs-opensearch/internal/observer/client_test.go @@ -0,0 +1,91 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package observer + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + "time" +) + +func TestNewClient(t *testing.T) { + c := NewClient("http://localhost:8080/") + if c.baseURL != "http://localhost:8080" { + t.Errorf("expected trailing slash removed, got %q", c.baseURL) + } +} + +func TestForwardAlert_Success(t *testing.T) { + alertTime := time.Date(2025, 6, 15, 10, 30, 0, 0, time.UTC) + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/api/v1alpha1/alerts/webhook" { + t.Errorf("unexpected path: %s", r.URL.Path) + } + if r.Method != http.MethodPost { + t.Errorf("unexpected method: %s", r.Method) + } + if r.Header.Get("Content-Type") != "application/json" { + t.Errorf("unexpected content-type: %s", r.Header.Get("Content-Type")) + } + + body, err := io.ReadAll(r.Body) + if err != nil { + t.Fatalf("failed to read body: %v", err) + } + + var payload alertWebhookRequest + if err := json.Unmarshal(body, &payload); err != nil { + t.Fatalf("failed to unmarshal body: %v", err) + } + + if payload.RuleName != "my-rule" { + t.Errorf("expected ruleName 'my-rule', got %q", payload.RuleName) + } + if payload.RuleNamespace != "test-ns" { + t.Errorf("expected ruleNamespace 'test-ns', got %q", payload.RuleNamespace) + } + if payload.AlertValue != 42.5 { + t.Errorf("expected alertValue 42.5, got %v", payload.AlertValue) + } + if !payload.AlertTimestamp.Equal(alertTime) { + t.Errorf("expected alertTimestamp %v, got %v", alertTime, payload.AlertTimestamp) 
+ } + + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := NewClient(server.URL) + err := client.ForwardAlert(context.Background(), "my-rule", "test-ns", 42.5, alertTime) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestForwardAlert_ServerError(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + _, _ = w.Write([]byte("internal error")) + })) + defer server.Close() + + client := NewClient(server.URL) + err := client.ForwardAlert(context.Background(), "my-rule", "test-ns", 1, time.Now()) + if err == nil { + t.Fatal("expected error for server error response") + } +} + +func TestForwardAlert_ConnectionError(t *testing.T) { + client := NewClient("http://localhost:1") // unreachable port + err := client.ForwardAlert(context.Background(), "my-rule", "test-ns", 1, time.Now()) + if err == nil { + t.Fatal("expected error for connection failure") + } +} diff --git a/observability-logs-opensearch/internal/opensearch/client.go b/observability-logs-opensearch/internal/opensearch/client.go index 2c41117..fe05350 100644 --- a/observability-logs-opensearch/internal/opensearch/client.go +++ b/observability-logs-opensearch/internal/opensearch/client.go @@ -187,7 +187,7 @@ func (c *Client) CreateMonitor(ctx context.Context, monitor map[string]interface c.logger.Debug("Creating monitor", "body", string(body)) path := "/_plugins/_alerting/monitors" - req, err := http.NewRequest("POST", path, bytes.NewReader(body)) + req, err := http.NewRequestWithContext(ctx, "POST", path, bytes.NewReader(body)) if err != nil { return "", 0, fmt.Errorf("failed to create request: %w", err) } diff --git a/observability-logs-opensearch/internal/opensearch/client_test.go b/observability-logs-opensearch/internal/opensearch/client_test.go new file mode 100644 index 0000000..3f1d192 --- /dev/null +++ 
b/observability-logs-opensearch/internal/opensearch/client_test.go @@ -0,0 +1,505 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package opensearch + +import ( + "context" + "encoding/json" + "fmt" + "log/slog" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + + "github.com/opensearch-project/opensearch-go/v4" + "github.com/opensearch-project/opensearch-go/v4/opensearchapi" +) + +func testLogger() *slog.Logger { + return slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelError})) +} + +// newTestClient creates a Client that points at the given test server URL. +func newTestClient(t *testing.T, serverURL string) *Client { + t.Helper() + apiClient, err := opensearchapi.NewClient(opensearchapi.Config{ + Client: opensearch.Config{ + Addresses: []string{serverURL}, + }, + }) + if err != nil { + t.Fatalf("failed to create test client: %v", err) + } + return &Client{ + client: apiClient, + logger: testLogger(), + } +} + +func TestCheckHealth(t *testing.T) { + t.Run("green status", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_cluster/health" { + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{"status": "green"}) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + err := client.CheckHealth(context.Background()) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + }) + + t.Run("yellow status", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_cluster/health" { + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{"status": "yellow"}) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + err := 
client.CheckHealth(context.Background()) + if err != nil { + t.Fatalf("unexpected error for yellow status: %v", err) + } + }) + + t.Run("red status", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_cluster/health" { + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(map[string]string{"status": "red"}) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + err := client.CheckHealth(context.Background()) + if err == nil { + t.Fatal("expected error for red status") + } + if !strings.Contains(err.Error(), "red") { + t.Errorf("expected error to mention 'red', got: %v", err) + } + }) + + t.Run("error response", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_cluster/health" { + w.WriteHeader(http.StatusServiceUnavailable) + fmt.Fprint(w, "cluster unavailable") + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + err := client.CheckHealth(context.Background()) + if err == nil { + t.Fatal("expected error for non-200 response") + } + }) +} + +func TestSearch(t *testing.T) { + t.Run("success with hits", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + resp := map[string]interface{}{ + "took": 5, + "timed_out": false, + "hits": map[string]interface{}{ + "total": map[string]interface{}{ + "value": 2, + "relation": "eq", + }, + "hits": []map[string]interface{}{ + { + "_id": "hit-1", + "_score": 1.5, + "_source": map[string]interface{}{ + "log": "test log 1", + "@timestamp": "2025-06-15T10:00:00Z", + }, + }, + { + "_id": "hit-2", + "_score": 1.0, + "_source": map[string]interface{}{ + "log": "test log 2", + 
"@timestamp": "2025-06-15T11:00:00Z", + }, + }, + }, + }, + } + json.NewEncoder(w).Encode(resp) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + query := map[string]interface{}{ + "size": 10, + "query": map[string]interface{}{"match_all": map[string]interface{}{}}, + } + + result, err := client.Search(context.Background(), []string{"logs-*"}, query) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if result.Hits.Total.Value != 2 { + t.Errorf("expected 2 total hits, got %d", result.Hits.Total.Value) + } + if len(result.Hits.Hits) != 2 { + t.Errorf("expected 2 hits, got %d", len(result.Hits.Hits)) + } + if result.Took != 5 { + t.Errorf("expected took=5, got %d", result.Took) + } + }) + + t.Run("error response", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusBadRequest) + fmt.Fprint(w, `{"error":"bad request"}`) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + _, err := client.Search(context.Background(), []string{"logs-*"}, map[string]interface{}{}) + if err == nil { + t.Fatal("expected error for bad request") + } + }) +} + +func TestSearchMonitorByName(t *testing.T) { + t.Run("found", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors/_search" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + resp := `{ + "hits": { + "total": {"value": 1, "relation": "eq"}, + "hits": [{"_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890", "_source": {"name": "test-monitor"}}] + } + }` + fmt.Fprint(w, resp) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + id, found, err := client.SearchMonitorByName(context.Background(), "test-monitor") + if err != nil { + 
t.Fatalf("unexpected error: %v", err) + } + if !found { + t.Fatal("expected monitor to be found") + } + if id != "a1b2c3d4-e5f6-7890-abcd-ef1234567890" { + t.Errorf("expected id 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', got %q", id) + } + }) + + t.Run("not found", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors/_search" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + resp := `{ + "hits": { + "total": {"value": 0, "relation": "eq"}, + "hits": [] + } + }` + fmt.Fprint(w, resp) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + _, found, err := client.SearchMonitorByName(context.Background(), "nonexistent") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if found { + t.Fatal("expected monitor not to be found") + } + }) + + t.Run("error response", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors/_search" { + w.WriteHeader(http.StatusInternalServerError) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + _, _, err := client.SearchMonitorByName(context.Background(), "test") + if err == nil { + t.Fatal("expected error for server error") + } + }) +} + +func TestCreateMonitor(t *testing.T) { + t.Run("success", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/_plugins/_alerting/monitors" && r.Method == "POST" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + resp := `{"_id": "b2c3d4e5-f6a7-8901-bcde-f12345678901", "monitor": {"last_update_time": 1718444400000}}` + fmt.Fprint(w, resp) + return + } + w.WriteHeader(http.StatusOK) + })) 
+ defer server.Close() + + client := newTestClient(t, server.URL) + id, lastUpdate, err := client.CreateMonitor(context.Background(), map[string]interface{}{"name": "test"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if id != "b2c3d4e5-f6a7-8901-bcde-f12345678901" { + t.Errorf("expected id 'b2c3d4e5-f6a7-8901-bcde-f12345678901', got %q", id) + } + if lastUpdate != 1718444400000 { + t.Errorf("expected last_update_time 1718444400000, got %d", lastUpdate) + } + }) + + t.Run("error", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusBadRequest) + fmt.Fprint(w, `{"error":"bad request"}`) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + _, _, err := client.CreateMonitor(context.Background(), map[string]interface{}{"name": "test"}) + if err == nil { + t.Fatal("expected error for bad request") + } + }) +} + +func TestGetMonitorByID(t *testing.T) { + t.Run("success", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if strings.HasPrefix(r.URL.Path, "/_plugins/_alerting/monitors/") && r.Method == "GET" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + resp := `{"monitor": {"name": "test-monitor", "enabled": true}}` + fmt.Fprint(w, resp) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + monitor, err := client.GetMonitorByID(context.Background(), "a1b2c3d4-e5f6-7890-abcd-ef1234567890") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if monitor["name"] != "test-monitor" { + t.Errorf("expected name 'test-monitor', got %v", monitor["name"]) + } + }) + + t.Run("missing monitor field", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if strings.HasPrefix(r.URL.Path, 
"/_plugins/_alerting/monitors/") { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + fmt.Fprint(w, `{"other": "data"}`) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + _, err := client.GetMonitorByID(context.Background(), "a1b2c3d4-e5f6-7890-abcd-ef1234567890") + if err == nil { + t.Fatal("expected error for missing monitor field") + } + if !strings.Contains(err.Error(), "monitor object not found") { + t.Errorf("unexpected error: %v", err) + } + }) + + t.Run("error response", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusNotFound) + fmt.Fprint(w, `{"error":"not found"}`) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + _, err := client.GetMonitorByID(context.Background(), "nonexistent") + if err == nil { + t.Fatal("expected error for 404") + } + }) +} + +func TestUpdateMonitor(t *testing.T) { + t.Run("success", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if strings.HasPrefix(r.URL.Path, "/_plugins/_alerting/monitors/") && r.Method == "PUT" { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + resp := `{"_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890", "monitor": {"last_update_time": 1718444400001}}` + fmt.Fprint(w, resp) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + lastUpdate, err := client.UpdateMonitor(context.Background(), "a1b2c3d4-e5f6-7890-abcd-ef1234567890", map[string]interface{}{"name": "updated"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if lastUpdate != 1718444400001 { + t.Errorf("expected last_update_time 1718444400001, got %d", lastUpdate) + } + }) + + t.Run("error", func(t *testing.T) { + server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + fmt.Fprint(w, `{"error":"internal error"}`) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + _, err := client.UpdateMonitor(context.Background(), "a1b2c3d4-e5f6-7890-abcd-ef1234567890", map[string]interface{}{}) + if err == nil { + t.Fatal("expected error for server error") + } + }) +} + +func TestDeleteMonitor(t *testing.T) { + t.Run("success 200", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if strings.HasPrefix(r.URL.Path, "/_plugins/_alerting/monitors/") && r.Method == "DELETE" { + w.WriteHeader(http.StatusOK) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + err := client.DeleteMonitor(context.Background(), "a1b2c3d4-e5f6-7890-abcd-ef1234567890") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + }) + + t.Run("success 204", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if strings.HasPrefix(r.URL.Path, "/_plugins/_alerting/monitors/") && r.Method == "DELETE" { + w.WriteHeader(http.StatusNoContent) + return + } + w.WriteHeader(http.StatusOK) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + err := client.DeleteMonitor(context.Background(), "a1b2c3d4-e5f6-7890-abcd-ef1234567890") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + }) + + t.Run("error", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusNotFound) + fmt.Fprint(w, `{"error":"not found"}`) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + err := client.DeleteMonitor(context.Background(), "nonexistent") + if err == nil { + t.Fatal("expected error for 404") + } + }) +} + +func 
TestWriteAlertEntry(t *testing.T) { + t.Run("success", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + resp := map[string]interface{}{ + "_id": "alert-entry-1", + "_index": "openchoreo-alerts", + "result": "created", + "_shards": map[string]interface{}{"total": 2, "successful": 1, "failed": 0}, + } + json.NewEncoder(w).Encode(resp) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + id, err := client.WriteAlertEntry(context.Background(), map[string]interface{}{ + "ruleName": "test-rule", + "alertTime": "2025-06-15T10:00:00Z", + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if id != "alert-entry-1" { + t.Errorf("expected id 'alert-entry-1', got %q", id) + } + }) + + t.Run("error", func(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusInternalServerError) + fmt.Fprint(w, `{"error":"internal error"}`) + })) + defer server.Close() + + client := newTestClient(t, server.URL) + _, err := client.WriteAlertEntry(context.Background(), map[string]interface{}{}) + if err == nil { + t.Fatal("expected error for server error") + } + }) +} diff --git a/observability-logs-opensearch/internal/opensearch/queries.go b/observability-logs-opensearch/internal/opensearch/queries.go index 67ce2b6..e082ae9 100644 --- a/observability-logs-opensearch/internal/opensearch/queries.go +++ b/observability-logs-opensearch/internal/opensearch/queries.go @@ -40,6 +40,10 @@ func formatDurationForOpenSearch(d string) (string, error) { return "", err } + if parsed <= 0 { + return "", fmt.Errorf("duration must be a positive whole number of minutes or hours: %s", d) + } + switch { case parsed%time.Hour == 0: return fmt.Sprintf("%dh", parsed/time.Hour), nil @@ -362,12 
+366,20 @@ func (qb *QueryBuilder) BuildLogAlertingRuleMonitorBody(params AlertingRuleReque if err != nil { return nil, fmt.Errorf("invalid interval format: %w", err) } + if intervalDuration <= 0 || intervalDuration%time.Minute != 0 { + return nil, fmt.Errorf("invalid interval: must be a positive whole number of minutes, got %q", params.Condition.Interval) + } query, err := qb.BuildLogAlertingRuleQuery(params) if err != nil { return nil, fmt.Errorf("failed to build log alerting rule query: %w", err) } + operatorSymbol, err := GetOperatorSymbol(params.Condition.Operator) + if err != nil { + return nil, fmt.Errorf("invalid condition operator: %w", err) + } + monitorBody := MonitorBody{ Type: "monitor", MonitorType: "query_level_monitor", @@ -375,7 +387,7 @@ func (qb *QueryBuilder) BuildLogAlertingRuleMonitorBody(params AlertingRuleReque Enabled: params.Condition.Enabled, Schedule: MonitorSchedule{ Period: MonitorSchedulePeriod{ - Interval: intervalDuration.Minutes(), + Interval: int(intervalDuration.Minutes()), Unit: "MINUTES", }, }, @@ -394,7 +406,7 @@ func (qb *QueryBuilder) BuildLogAlertingRuleMonitorBody(params AlertingRuleReque Severity: "1", Condition: MonitorTriggerCondition{ Script: MonitorTriggerConditionScript{ - Source: fmt.Sprintf("ctx.results[0].hits.total.value %s %s", GetOperatorSymbol(params.Condition.Operator), strconv.FormatFloat(params.Condition.Threshold, 'f', -1, 64)), + Source: fmt.Sprintf("ctx.results[0].hits.total.value %s %s", operatorSymbol, strconv.FormatFloat(params.Condition.Threshold, 'f', -1, 64)), Lang: "painless", }, }, @@ -443,18 +455,18 @@ func (qb *QueryBuilder) BuildLogAlertingRuleMonitorBody(params AlertingRuleReque } // GetOperatorSymbol converts an operator string to its symbol. 
-func GetOperatorSymbol(operator string) string { +func GetOperatorSymbol(operator string) (string, error) { switch operator { case "gt": - return ">" + return ">", nil case "gte": - return ">=" + return ">=", nil case "lt": - return "<" + return "<", nil case "lte": - return "<=" + return "<=", nil } - return "" + return "", fmt.Errorf("unknown operator: %q", operator) } // ReverseMapOperator converts an operator symbol back to its string name. diff --git a/observability-logs-opensearch/internal/opensearch/queries_test.go b/observability-logs-opensearch/internal/opensearch/queries_test.go new file mode 100644 index 0000000..774345d --- /dev/null +++ b/observability-logs-opensearch/internal/opensearch/queries_test.go @@ -0,0 +1,433 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package opensearch + +import ( + "encoding/json" + "strings" + "testing" +) + +func TestBuildComponentLogsQueryV1(t *testing.T) { + qb := NewQueryBuilder("logs-") + + t.Run("full params", func(t *testing.T) { + params := ComponentLogsQueryParamsV1{ + StartTime: "2025-06-15T00:00:00Z", + EndTime: "2025-06-15T23:59:59Z", + NamespaceName: "test-ns", + ProjectID: "550e8400-e29b-41d4-a716-446655440003", + ComponentID: "550e8400-e29b-41d4-a716-446655440001", + EnvironmentID: "550e8400-e29b-41d4-a716-446655440002", + SearchPhrase: "error", + LogLevels: []string{"ERROR", "WARN"}, + Limit: 50, + SortOrder: "asc", + } + + query, err := qb.BuildComponentLogsQueryV1(params) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if query["size"] != 50 { + t.Errorf("expected size=50, got %v", query["size"]) + } + + // Verify sort order + sortArr := query["sort"].([]map[string]interface{}) + tsSort := sortArr[0]["@timestamp"].(map[string]interface{}) + if tsSort["order"] != "asc" { + t.Errorf("expected sort order 'asc', got %v", tsSort["order"]) + } + + // Verify query structure + boolQuery := 
query["query"].(map[string]interface{})["bool"].(map[string]interface{}) + mustConditions := boolQuery["must"].([]map[string]interface{}) + if len(mustConditions) < 4 { + t.Errorf("expected at least 4 must conditions, got %d", len(mustConditions)) + } + }) + + t.Run("minimal params", func(t *testing.T) { + params := ComponentLogsQueryParamsV1{ + StartTime: "2025-06-15T00:00:00Z", + EndTime: "2025-06-15T23:59:59Z", + NamespaceName: "test-ns", + } + + query, err := qb.BuildComponentLogsQueryV1(params) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Default limit + if query["size"] != 100 { + t.Errorf("expected default size=100, got %v", query["size"]) + } + + // Default sort order + sortArr := query["sort"].([]map[string]interface{}) + tsSort := sortArr[0]["@timestamp"].(map[string]interface{}) + if tsSort["order"] != "desc" { + t.Errorf("expected default sort order 'desc', got %v", tsSort["order"]) + } + }) + + t.Run("missing required fields", func(t *testing.T) { + params := ComponentLogsQueryParamsV1{ + StartTime: "2025-06-15T00:00:00Z", + } + + _, err := qb.BuildComponentLogsQueryV1(params) + if err == nil { + t.Error("expected error for missing required fields") + } + }) +} + +func TestBuildWorkflowRunLogsQuery(t *testing.T) { + qb := NewQueryBuilder("logs-") + + t.Run("with step name", func(t *testing.T) { + params := WorkflowRunQueryParams{ + QueryParams: QueryParams{ + StartTime: "2025-06-15T00:00:00Z", + EndTime: "2025-06-15T23:59:59Z", + NamespaceName: "test-ns", + Limit: 50, + SortOrder: "desc", + }, + WorkflowRunID: "run-123", + StepName: "build", + } + + query := qb.BuildWorkflowRunLogsQuery(params) + + boolQuery := query["query"].(map[string]interface{})["bool"].(map[string]interface{}) + mustConditions := boolQuery["must"].([]map[string]interface{}) + + // Should have: pod name wildcard, step name filter, time range, namespace + if len(mustConditions) < 4 { + t.Errorf("expected at least 4 must conditions with step name, got %d", 
len(mustConditions)) + } + + // Verify must_not conditions exclude init and wait containers + mustNotConditions := boolQuery["must_not"].([]map[string]interface{}) + if len(mustNotConditions) != 2 { + t.Errorf("expected 2 must_not conditions, got %d", len(mustNotConditions)) + } + }) + + t.Run("without step name", func(t *testing.T) { + params := WorkflowRunQueryParams{ + QueryParams: QueryParams{ + StartTime: "2025-06-15T00:00:00Z", + EndTime: "2025-06-15T23:59:59Z", + Limit: 100, + SortOrder: "desc", + }, + WorkflowRunID: "run-456", + } + + query := qb.BuildWorkflowRunLogsQuery(params) + + boolQuery := query["query"].(map[string]interface{})["bool"].(map[string]interface{}) + mustConditions := boolQuery["must"].([]map[string]interface{}) + + // Without step name and without namespace: pod name wildcard + time range only + if len(mustConditions) < 2 { + t.Errorf("expected at least 2 must conditions without step name, got %d", len(mustConditions)) + } + }) + + t.Run("with namespace", func(t *testing.T) { + params := WorkflowRunQueryParams{ + QueryParams: QueryParams{ + StartTime: "2025-06-15T00:00:00Z", + EndTime: "2025-06-15T23:59:59Z", + NamespaceName: "my-ns", + Limit: 100, + SortOrder: "desc", + }, + WorkflowRunID: "run-789", + } + + query := qb.BuildWorkflowRunLogsQuery(params) + + // Marshal and check for the workflows- prefix + queryBytes, _ := json.Marshal(query) + queryStr := string(queryBytes) + if !strings.Contains(queryStr, "workflows-my-ns") { + t.Error("expected namespace to be prefixed with 'workflows-'") + } + }) +} + +func TestGenerateIndices(t *testing.T) { + qb := NewQueryBuilder("logs-") + + t.Run("single day", func(t *testing.T) { + indices, err := qb.GenerateIndices("2025-06-15T00:00:00Z", "2025-06-15T23:59:59Z") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(indices) != 1 { + t.Fatalf("expected 1 index, got %d: %v", len(indices), indices) + } + if indices[0] != "logs-2025-06-15" { + t.Errorf("expected 
'logs-2025-06-15', got %q", indices[0]) + } + }) + + t.Run("multi-day span", func(t *testing.T) { + indices, err := qb.GenerateIndices("2025-06-14T00:00:00Z", "2025-06-16T23:59:59Z") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(indices) != 3 { + t.Fatalf("expected 3 indices, got %d: %v", len(indices), indices) + } + expected := []string{"logs-2025-06-14", "logs-2025-06-15", "logs-2025-06-16"} + for i, exp := range expected { + if indices[i] != exp { + t.Errorf("index[%d] = %q, want %q", i, indices[i], exp) + } + } + }) + + t.Run("empty times", func(t *testing.T) { + indices, err := qb.GenerateIndices("", "") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(indices) != 1 || indices[0] != "logs-*" { + t.Errorf("expected ['logs-*'], got %v", indices) + } + }) + + t.Run("invalid time format", func(t *testing.T) { + _, err := qb.GenerateIndices("not-a-time", "also-not-a-time") + if err == nil { + t.Error("expected error for invalid time format") + } + }) +} + +func TestBuildLogAlertingRuleQuery(t *testing.T) { + qb := NewQueryBuilder("logs-") + + params := AlertingRuleRequest{ + Metadata: AlertingRuleMetadata{ + Name: "test-rule", + ComponentUID: "550e8400-e29b-41d4-a716-446655440001", + ProjectUID: "550e8400-e29b-41d4-a716-446655440003", + EnvironmentUID: "550e8400-e29b-41d4-a716-446655440002", + }, + Source: AlertingRuleSource{ + Query: "error", + }, + Condition: AlertingRuleCondition{ + Window: "1h", + }, + } + + query, err := qb.BuildLogAlertingRuleQuery(params) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if query["size"] != 0 { + t.Errorf("expected size=0, got %v", query["size"]) + } + + // Verify the query contains filter conditions + boolQuery := query["query"].(map[string]interface{})["bool"].(map[string]interface{}) + filters := boolQuery["filter"].([]map[string]interface{}) + if len(filters) != 5 { + t.Errorf("expected 5 filter conditions, got %d", len(filters)) + } +} + +func 
TestBuildLogAlertingRuleMonitorBody(t *testing.T) { + qb := NewQueryBuilder("logs-") + + t.Run("valid params", func(t *testing.T) { + params := AlertingRuleRequest{ + Metadata: AlertingRuleMetadata{ + Name: "test-rule", + Namespace: "test-ns", + ComponentUID: "550e8400-e29b-41d4-a716-446655440001", + ProjectUID: "550e8400-e29b-41d4-a716-446655440003", + EnvironmentUID: "550e8400-e29b-41d4-a716-446655440002", + }, + Source: AlertingRuleSource{ + Query: "error", + }, + Condition: AlertingRuleCondition{ + Enabled: true, + Window: "1h", + Interval: "5m", + Operator: "gt", + Threshold: 10, + }, + } + + body, err := qb.BuildLogAlertingRuleMonitorBody(params) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if body["type"] != "monitor" { + t.Errorf("expected type='monitor', got %v", body["type"]) + } + if body["name"] != "test-rule" { + t.Errorf("expected name='test-rule', got %v", body["name"]) + } + if body["enabled"] != true { + t.Errorf("expected enabled=true, got %v", body["enabled"]) + } + }) + + t.Run("invalid interval", func(t *testing.T) { + params := AlertingRuleRequest{ + Metadata: AlertingRuleMetadata{Name: "test"}, + Source: AlertingRuleSource{Query: "error"}, + Condition: AlertingRuleCondition{ + Window: "1h", + Interval: "invalid", + }, + } + + _, err := qb.BuildLogAlertingRuleMonitorBody(params) + if err == nil { + t.Error("expected error for invalid interval") + } + }) + + t.Run("invalid window", func(t *testing.T) { + params := AlertingRuleRequest{ + Metadata: AlertingRuleMetadata{Name: "test"}, + Source: AlertingRuleSource{Query: "error"}, + Condition: AlertingRuleCondition{ + Window: "invalid", + Interval: "5m", + }, + } + + _, err := qb.BuildLogAlertingRuleMonitorBody(params) + if err == nil { + t.Error("expected error for invalid window") + } + }) +} + +func TestGetOperatorSymbol(t *testing.T) { + tests := []struct { + input string + expected string + wantErr bool + }{ + {"gt", ">", false}, + {"gte", ">=", false}, + {"lt", "<", 
false}, + {"lte", "<=", false}, + {"unknown", "", true}, + } + + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + result, err := GetOperatorSymbol(tt.input) + if tt.wantErr { + if err == nil { + t.Errorf("GetOperatorSymbol(%q) expected error, got nil", tt.input) + } + return + } + if err != nil { + t.Errorf("GetOperatorSymbol(%q) unexpected error: %v", tt.input, err) + } + if result != tt.expected { + t.Errorf("GetOperatorSymbol(%q) = %q, want %q", tt.input, result, tt.expected) + } + }) + } +} + +func TestReverseMapOperator(t *testing.T) { + tests := []struct { + input string + expected string + }{ + {">", "gt"}, + {">=", "gte"}, + {"<", "lt"}, + {"<=", "lte"}, + {"unknown", ""}, + } + + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + result := ReverseMapOperator(tt.input) + if result != tt.expected { + t.Errorf("ReverseMapOperator(%q) = %q, want %q", tt.input, result, tt.expected) + } + }) + } +} + +func TestSanitizeWildcardValue(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + {"backslash", `hello\world`, `hello\\world`}, + {"double quote", `hello"world`, `hello\"world`}, + {"asterisk", `hello*world`, `hello\*world`}, + {"question mark", `hello?world`, `hello\?world`}, + {"multiple special chars", `a*b?c\d"e`, `a\*b\?c\\d\"e`}, + {"no special chars", "hello world", "hello world"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := sanitizeWildcardValue(tt.input) + if result != tt.expected { + t.Errorf("sanitizeWildcardValue(%q) = %q, want %q", tt.input, result, tt.expected) + } + }) + } +} + +func TestFormatDurationForOpenSearch(t *testing.T) { + tests := []struct { + name string + input string + expected string + wantErr bool + }{ + {"hours", "2h", "2h", false}, + {"minutes", "30m", "30m", false}, + {"one hour", "1h", "1h", false}, + {"seconds not supported", "30s", "", true}, + {"zero duration", "0m", "", true}, + {"negative duration", "-5m", "", 
true}, + {"negative hours", "-2h", "", true}, + {"invalid", "invalid", "", true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := formatDurationForOpenSearch(tt.input) + if (err != nil) != tt.wantErr { + t.Errorf("formatDurationForOpenSearch(%q) error = %v, wantErr %v", tt.input, err, tt.wantErr) + return + } + if result != tt.expected { + t.Errorf("formatDurationForOpenSearch(%q) = %q, want %q", tt.input, result, tt.expected) + } + }) + } +} diff --git a/observability-logs-opensearch/internal/opensearch/types.go b/observability-logs-opensearch/internal/opensearch/types.go index 06ec1ac..a8b8f72 100644 --- a/observability-logs-opensearch/internal/opensearch/types.go +++ b/observability-logs-opensearch/internal/opensearch/types.go @@ -142,7 +142,7 @@ type MonitorSchedule struct { // MonitorSchedulePeriod defines the time period for schedule. type MonitorSchedulePeriod struct { - Interval float64 `json:"interval"` + Interval int `json:"interval"` Unit string `json:"unit"` } diff --git a/observability-logs-opensearch/internal/opensearch/types_test.go b/observability-logs-opensearch/internal/opensearch/types_test.go new file mode 100644 index 0000000..05b1551 --- /dev/null +++ b/observability-logs-opensearch/internal/opensearch/types_test.go @@ -0,0 +1,201 @@ +// Copyright 2026 The OpenChoreo Authors +// SPDX-License-Identifier: Apache-2.0 + +package opensearch + +import ( + "strings" + "testing" + "time" +) + +func TestParseLogEntry(t *testing.T) { + hit := Hit{ + ID: "test-id", + Source: map[string]interface{}{ + "@timestamp": "2025-06-15T10:30:00Z", + "log": "ERROR something went wrong", + "kubernetes": map[string]interface{}{ + "namespace_name": "test-ns", + "pod_id": "pod-123", + "pod_name": "my-pod", + "container_name": "main", + "labels": map[string]interface{}{ + ReplaceDots(ComponentID): "550e8400-e29b-41d4-a716-446655440001", + ReplaceDots(EnvironmentID): "550e8400-e29b-41d4-a716-446655440002", + 
ReplaceDots(ProjectID): "550e8400-e29b-41d4-a716-446655440003", + ReplaceDots(Version): "v1", + ReplaceDots(VersionID): "ver-123", + ReplaceDots(ComponentName): "my-comp", + ReplaceDots(EnvironmentName): "dev", + ReplaceDots(ProjectName): "my-proj", + ReplaceDots(NamespaceName): "my-ns", + }, + }, + }, + } + + entry := ParseLogEntry(hit) + + expectedTime := time.Date(2025, 6, 15, 10, 30, 0, 0, time.UTC) + if !entry.Timestamp.Equal(expectedTime) { + t.Errorf("expected timestamp %v, got %v", expectedTime, entry.Timestamp) + } + if entry.Log != "ERROR something went wrong" { + t.Errorf("expected log message, got %q", entry.Log) + } + if entry.LogLevel != "ERROR" { + t.Errorf("expected log level ERROR, got %q", entry.LogLevel) + } + if entry.ComponentID != "550e8400-e29b-41d4-a716-446655440001" { + t.Errorf("expected componentId '550e8400-e29b-41d4-a716-446655440001', got %q", entry.ComponentID) + } + if entry.EnvironmentID != "550e8400-e29b-41d4-a716-446655440002" { + t.Errorf("expected environmentId '550e8400-e29b-41d4-a716-446655440002', got %q", entry.EnvironmentID) + } + if entry.ProjectID != "550e8400-e29b-41d4-a716-446655440003" { + t.Errorf("expected projectId '550e8400-e29b-41d4-a716-446655440003', got %q", entry.ProjectID) + } + if entry.Version != "v1" { + t.Errorf("expected version 'v1', got %q", entry.Version) + } + if entry.ComponentName != "my-comp" { + t.Errorf("expected componentName 'my-comp', got %q", entry.ComponentName) + } + if entry.EnvironmentName != "dev" { + t.Errorf("expected environmentName 'dev', got %q", entry.EnvironmentName) + } + if entry.ProjectName != "my-proj" { + t.Errorf("expected projectName 'my-proj', got %q", entry.ProjectName) + } + if entry.NamespaceName != "my-ns" { + t.Errorf("expected namespaceName 'my-ns', got %q", entry.NamespaceName) + } + if entry.Namespace != "test-ns" { + t.Errorf("expected namespace 'test-ns', got %q", entry.Namespace) + } + if entry.PodID != "pod-123" { + t.Errorf("expected podId 'pod-123', got %q", 
entry.PodID) + } + if entry.PodName != "my-pod" { + t.Errorf("expected podName 'my-pod', got %q", entry.PodName) + } + if entry.ContainerName != "main" { + t.Errorf("expected containerName 'main', got %q", entry.ContainerName) + } + if len(entry.Labels) == 0 { + t.Error("expected labels to be populated") + } +} + +func TestParseLogEntry_EmptySource(t *testing.T) { + hit := Hit{ + ID: "empty-hit", + Source: map[string]interface{}{}, + } + + entry := ParseLogEntry(hit) + + if !entry.Timestamp.IsZero() { + t.Errorf("expected zero timestamp, got %v", entry.Timestamp) + } + if entry.Log != "" { + t.Errorf("expected empty log, got %q", entry.Log) + } + if entry.ComponentID != "" { + t.Errorf("expected empty componentId, got %q", entry.ComponentID) + } +} + +func TestExtractLogLevel(t *testing.T) { + tests := []struct { + input string + expected string + }{ + {"2025-01-01 ERROR something failed", "ERROR"}, + {"WARN disk space low", "WARN"}, + {"WARNING: deprecated function", "WARN"}, + {"INFO application started", "INFO"}, + {"DEBUG variable x = 5", "DEBUG"}, + {"FATAL out of memory", "FATAL"}, + {"SEVERE critical failure", "SEVERE"}, + {"just a regular log message", "INFO"}, + {"error in lowercase", "ERROR"}, + } + + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + result := extractLogLevel(tt.input) + if result != tt.expected { + t.Errorf("extractLogLevel(%q) = %q, want %q", tt.input, result, tt.expected) + } + }) + } +} + +func TestBuildSearchBody(t *testing.T) { + query := map[string]interface{}{ + "size": 10, + "query": map[string]interface{}{ + "match_all": map[string]interface{}{}, + }, + } + + reader := buildSearchBody(query) + if reader == nil { + t.Fatal("expected non-nil reader") + } + + b := make([]byte, 1024) + n, _ := reader.Read(b) + if n == 0 { + t.Error("expected non-empty body") + } + + if !strings.Contains(string(b[:n]), "match_all") { + t.Error("expected body to contain 'match_all'") + } +} + +func TestParseSearchResponse(t 
*testing.T) { + t.Run("valid JSON", func(t *testing.T) { + jsonStr := `{ + "took": 5, + "timed_out": false, + "hits": { + "total": { + "value": 2, + "relation": "eq" + }, + "hits": [ + {"_id": "1", "_source": {"log": "test1"}}, + {"_id": "2", "_source": {"log": "test2"}} + ] + } + }` + + resp, err := parseSearchResponse(strings.NewReader(jsonStr)) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if resp.Took != 5 { + t.Errorf("expected took=5, got %d", resp.Took) + } + if resp.TimedOut { + t.Error("expected timed_out=false") + } + if resp.Hits.Total.Value != 2 { + t.Errorf("expected total=2, got %d", resp.Hits.Total.Value) + } + if len(resp.Hits.Hits) != 2 { + t.Errorf("expected 2 hits, got %d", len(resp.Hits.Hits)) + } + }) + + t.Run("invalid JSON", func(t *testing.T) { + _, err := parseSearchResponse(strings.NewReader("not json")) + if err == nil { + t.Error("expected error for invalid JSON") + } + }) +} diff --git a/observability-logs-opensearch/main.go b/observability-logs-opensearch/main.go index 1db693e..2fd4fd7 100644 --- a/observability-logs-opensearch/main.go +++ b/observability-logs-opensearch/main.go @@ -70,18 +70,25 @@ func main() { logsHandler := app.NewLogsHandler(osClient, queryBuilder, observerClient, logger) srv := app.NewServer(cfg.ServerPort, logsHandler, logger) + errCh := make(chan error, 1) go func() { if err := srv.Start(); err != nil { - logger.Error("Server error", slog.Any("error", err)) - os.Exit(1) + errCh <- err } }() quit := make(chan os.Signal, 1) signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) - <-quit - logger.Info("Shutting down gracefully") + var startErr bool + + select { + case err := <-errCh: + logger.Error("Server error", slog.Any("error", err)) + startErr = true + case <-quit: + logger.Info("Shutting down gracefully") + } shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 10*time.Second) defer shutdownCancel() @@ -91,5 +98,9 @@ func main() { os.Exit(1) } + if startErr { + os.Exit(1) + 
} + logger.Info("Server stopped") }