From 75f94903b36d11b7b47dc3e0ebabab1ce3acabc7 Mon Sep 17 00:00:00 2001 From: Julien <291750+roidelapluie@users.noreply.github.com> Date: Thu, 29 Jan 2026 13:36:13 +0100 Subject: [PATCH] Add OpenAPI 3.2 specification generation for Prometheus HTTP API (#17825) * Add OpenAPI 3.2 specification generation for Prometheus HTTP API This commit introduces an OpenAPI specification for the Prometheus API. After testing multiple code-generation servers with built-in APIs, this implementation uses an independent spec file outside of the critical path. This spec file is tested with a framework present in this pull request. The specification helps clients know which parameters they can use and is served at /api/v1/openapi.yaml. The spec file will evolve with the Prometheus API and has the same version number. Downstream projects can tune the APIs presented in the spec file with configuration options using the IncludePaths setting for path filtering. In the future, there is room to generate a server from this spec file (e.g. with interfaces), but this is out of scope for this pull request. Architecture: - Core OpenAPI infrastructure (openapi.go): Dynamic spec building, caching, and thread-safe spec generation - Schema definitions (openapi_schemas.go): Complete type definitions for all API request and response types - Path specifications (openapi_paths.go): Endpoint definitions with parameters, request bodies, and response schemas - Examples (openapi_examples.go): Realistic request/response examples - Helper functions (openapi_helpers.go): Reusable builders for common OpenAPI structures Testing: - Comprehensive test suite with golden file validation - Test helpers package for API testing infrastructure - OpenAPI compliance validation utilities The golden file captures the complete specification for snapshot testing. Update with: go test -run TestOpenAPIGolden -update-openapi-spec REVIEWERS: The most important thing to check would be the OpenAPI golden file (web/api/v1/testdata/openapi_golden.yaml). Test scenarios are important as they test the actual OpenAPI spec validity. Signed-off-by: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> * Add OpenAPI 3.1 support with version selection Add support for both OpenAPI 3.1 and 3.2 specifications with version selection via openapi_version query parameter. Defaults to 3.1 for broader compatibility Signed-off-by: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> * Enhance OpenAPI examples and add helper functions - Add timestampExamples helper for consistent time formatting - Add exampleMap helper to simplify example creation - Improve example summaries with query details - Add matrix result example for range vector queries Signed-off-by: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> * web/api: Add AtST method to test helper iterators Implement the AtST() method required by chunkenc.Iterator interface for FakeSeriesIterator and FakeHistogramSeriesIterator test helpers. 
The method returns 0 as these test helpers don't use start timestamps Signed-off-by: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> * OpenAPI: Add minimum coverage test Signed-off-by: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> * OpenAPI: Improve examples handling Signed-off-by: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> --------- Signed-off-by: Julien Pivotto <291750+roidelapluie@users.noreply.github.com> --- .gitattributes | 1 + .golangci.yml | 2 + .yamllint | 1 + docs/querying/api.md | 16 + documentation/examples/remote_storage/go.mod | 2 +- go.mod | 15 +- go.sum | 48 + go.work | 2 +- internal/tools/go.mod | 2 +- web/api/testhelpers/api.go | 244 + web/api/testhelpers/assertions.go | 252 + web/api/testhelpers/fixtures.go | 178 + web/api/testhelpers/mocks.go | 534 +++ web/api/testhelpers/openapi.go | 204 + web/api/testhelpers/request.go | 145 + web/api/v1/api.go | 8 +- web/api/v1/api_scenarios_test.go | 419 ++ web/api/v1/errors_test.go | 1 + web/api/v1/openapi.go | 320 ++ web/api/v1/openapi_coverage_test.go | 258 + web/api/v1/openapi_examples.go | 1013 ++++ web/api/v1/openapi_golden_test.go | 176 + web/api/v1/openapi_helpers.go | 343 ++ web/api/v1/openapi_paths.go | 626 +++ web/api/v1/openapi_schemas.go | 1223 +++++ web/api/v1/openapi_test.go | 289 ++ web/api/v1/test_helpers.go | 157 + web/api/v1/testdata/openapi_3.1_golden.yaml | 4401 +++++++++++++++++ web/api/v1/testdata/openapi_3.2_golden.yaml | 4452 ++++++++++++++++++ web/ui/mantine-ui/src/promql/tools/go.mod | 2 +- web/web.go | 9 + web/web_test.go | 2 + 32 files changed, 15337 insertions(+), 8 deletions(-) create mode 100644 .gitattributes create mode 100644 web/api/testhelpers/api.go create mode 100644 web/api/testhelpers/assertions.go create mode 100644 web/api/testhelpers/fixtures.go create mode 100644 web/api/testhelpers/mocks.go create mode 100644 web/api/testhelpers/openapi.go create mode 100644 web/api/testhelpers/request.go create mode 100644 web/api/v1/api_scenarios_test.go create mode 100644 web/api/v1/openapi.go create mode 100644 web/api/v1/openapi_coverage_test.go create mode 100644 web/api/v1/openapi_examples.go create mode 100644 web/api/v1/openapi_golden_test.go create mode 100644 web/api/v1/openapi_helpers.go create mode 100644 web/api/v1/openapi_paths.go create mode 100644 web/api/v1/openapi_schemas.go create mode 100644 web/api/v1/openapi_test.go create mode 100644 web/api/v1/test_helpers.go create mode 100644 web/api/v1/testdata/openapi_3.1_golden.yaml create mode 100644 web/api/v1/testdata/openapi_3.2_golden.yaml diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..432caee6f7 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +web/api/v1/testdata/openapi_golden.yaml linguist-generated diff --git a/.golangci.yml b/.golangci.yml index 0c866611e9..599a5e2b49 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -124,6 +124,8 @@ linters: # Disable this check for now since it introduces too many changes in our existing codebase. # See https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#hdr-Analyzer_omitzero for more details. - omitzero + # Disable waitgroup check until we really move to Go 1.25. + - waitgroup perfsprint: # Optimizes even if it requires an int or uint type cast. 
int-conversion: true diff --git a/.yamllint b/.yamllint index 8d09c375fd..b329f464fb 100644 --- a/.yamllint +++ b/.yamllint @@ -2,6 +2,7 @@ extends: default ignore: | **/node_modules + web/api/v1/testdata/openapi_*_golden.yaml rules: braces: diff --git a/docs/querying/api.md b/docs/querying/api.md index 4891db8980..7324669699 100644 --- a/docs/querying/api.md +++ b/docs/querying/api.md @@ -6,6 +6,22 @@ sort_rank: 7 The current stable HTTP API is reachable under `/api/v1` on a Prometheus server. Any non-breaking additions will be added under that endpoint. +## OpenAPI Specification + +An OpenAPI specification for the HTTP API is available at `/api/v1/openapi.yaml`. +By default, it returns OpenAPI 3.1 for broader compatibility. Use `?openapi_version=3.2` +for OpenAPI 3.2, which includes advanced features and endpoints like `/api/v1/notifications/live`. + +This machine-readable specification describes all available endpoints, request parameters, +response formats, and schemas. + +The OpenAPI specification can be used to: + +- Generate client libraries in various programming languages. +- Validate API requests and responses. +- Generate interactive API documentation. +- Test API endpoints. + ## Format overview The API response format is JSON. Every successful API request returns a `2xx` diff --git a/documentation/examples/remote_storage/go.mod b/documentation/examples/remote_storage/go.mod index 17076faddd..5f2cd98037 100644 --- a/documentation/examples/remote_storage/go.mod +++ b/documentation/examples/remote_storage/go.mod @@ -1,6 +1,6 @@ module github.com/prometheus/prometheus/documentation/examples/remote_storage -go 1.24.0 +go 1.25.0 require ( github.com/alecthomas/kingpin/v2 v2.4.0 diff --git a/go.mod b/go.mod index afc3f2740d..0aa3658177 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/prometheus/prometheus -go 1.24.0 +go 1.25.0 require ( github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 @@ -54,6 +54,8 @@ require ( github.com/oklog/ulid/v2 v2.1.1 github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.142.0 github.com/ovh/go-ovh v1.9.0 + github.com/pb33f/libopenapi v0.31.1 + github.com/pb33f/libopenapi-validator v0.10.0 github.com/prometheus/alertmanager v0.30.0 github.com/prometheus/client_golang v1.23.2 github.com/prometheus/client_golang/exp v0.0.0-20260101091701-2cd067eb23c9 @@ -85,6 +87,7 @@ require ( go.uber.org/goleak v1.3.0 go.yaml.in/yaml/v2 v2.4.3 go.yaml.in/yaml/v3 v3.0.4 + go.yaml.in/yaml/v4 v4.0.0-rc.3 golang.org/x/oauth2 v0.34.0 golang.org/x/sync v0.19.0 golang.org/x/sys v0.39.0 @@ -93,6 +96,7 @@ require ( google.golang.org/genproto/googleapis/api v0.0.0-20251222181119-0a764e51fe1b google.golang.org/grpc v1.78.0 google.golang.org/protobuf v1.36.11 + gopkg.in/yaml.v3 v3.0.1 k8s.io/api v0.34.3 k8s.io/apimachinery v0.34.3 k8s.io/client-go v0.34.3 @@ -102,6 +106,9 @@ require ( require ( github.com/aws/aws-sdk-go-v2/service/signin v1.0.4 // indirect + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/basgys/goxml2json v1.1.1-0.20231018121955-e66ee54ceaad // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/go-openapi/swag/cmdutils v0.25.4 // indirect github.com/go-openapi/swag/conv v0.25.4 // indirect github.com/go-openapi/swag/fileutils v0.25.4 // indirect @@ -113,8 +120,10 @@ require ( github.com/go-openapi/swag/stringutils v0.25.4 // indirect github.com/go-openapi/swag/typeutils v0.25.4 // indirect github.com/go-openapi/swag/yamlutils v0.25.4 // indirect + 
github.com/pb33f/jsonpath v0.7.0 // indirect + github.com/pb33f/ordered-map/v2 v2.3.0 // indirect + github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect go.uber.org/multierr v1.11.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect sigs.k8s.io/structured-merge-diff/v6 v6.3.0 // indirect ) @@ -237,7 +246,7 @@ require ( gopkg.in/evanphx/json-patch.v4 v4.12.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v2 v2.4.0 gotest.tools/v3 v3.0.3 // indirect k8s.io/kube-openapi v0.0.0-20250710124328-f3f2b991d03b // indirect k8s.io/utils v0.0.0-20250604170112-4c0f3b243397 // indirect diff --git a/go.sum b/go.sum index 6ac2105275..280724445a 100644 --- a/go.sum +++ b/go.sum @@ -81,6 +81,10 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.41.5 h1:SciGFVNZ4mHdm7gpD1dgZYnCuVdX github.com/aws/aws-sdk-go-v2/service/sts v1.41.5/go.mod h1:iW40X4QBmUxdP+fZNOpfmkdMZqsovezbAeO+Ubiv2pk= github.com/aws/smithy-go v1.24.0 h1:LpilSUItNPFr1eY85RYgTIg5eIEPtvFbskaFcmmIUnk= github.com/aws/smithy-go v1.24.0/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/basgys/goxml2json v1.1.1-0.20231018121955-e66ee54ceaad h1:3swAvbzgfaI6nKuDDU7BiKfZRdF+h2ZwKgMHd8Ha4t8= +github.com/basgys/goxml2json v1.1.1-0.20231018121955-e66ee54ceaad/go.mod h1:9+nBLYNWkvPcq9ep0owWUsPTLgL9ZXTsZWcCSVGGLJ0= github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 h1:6df1vn4bBlDDo4tARvBm7l6KA9iVMnE3NWizDeWSrps= github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3/go.mod h1:CIWtjkly68+yqLPbvwwR/fjNJA/idrtULjZWh2v1ys0= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -88,6 +92,10 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bitly/go-simplejson v0.5.1 h1:xgwPbetQScXt1gh9BmoJ6j9JMr3TElvuIyjR8pgdoow= +github.com/bitly/go-simplejson v0.5.1/go.mod h1:YOPVLzCfwK14b4Sff3oP1AmGhI9T9Vsg84etUnlyp+Q= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM= github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -116,6 +124,8 @@ github.com/digitalocean/godo v1.171.0 h1:QwpkwWKr3v7yxc8D4NQG973NoR9APCEWjYnLOQe github.com/digitalocean/godo v1.171.0/go.mod h1:xQsWpVCCbkDrWisHA72hPzPlnC+4W5w/McZY5ij9uvU= github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0= github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ= +github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/docker/docker v28.5.2+incompatible h1:DBX0Y0zAjZbSrm1uzOkdr1onVghKaftjlSWt4AFexzM= 
github.com/docker/docker v28.5.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= @@ -437,6 +447,14 @@ github.com/ovh/go-ovh v1.9.0/go.mod h1:cTVDnl94z4tl8pP1uZ/8jlVxntjSIf09bNcQ5TJSC github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pb33f/jsonpath v0.7.0 h1:3oG6yu1RqNoMZpqnRjBMqi8fSIXWoDAKDrsB0QGTcoU= +github.com/pb33f/jsonpath v0.7.0/go.mod h1:/+JlSIjWA2ijMVYGJ3IQPF4Q1nLMYbUTYNdk0exCDPQ= +github.com/pb33f/libopenapi v0.31.1 h1:smGr45U2Y+hHWYKiEV13oS2tP9IUnscqNb5qsvT9+YI= +github.com/pb33f/libopenapi v0.31.1/go.mod h1:oaebeA5l58AFbZ7qRKTtMnu15JEiPlaBas1vLDcw9vs= +github.com/pb33f/libopenapi-validator v0.10.0 h1:9XhgxW2jTDd+1aDMuIjGUsWaeUaPi5ql2z1Y+WBltiE= +github.com/pb33f/libopenapi-validator v0.10.0/go.mod h1:hW3wIpg4YCxLrJxyTrfrzP9Mtt9FvbD/nm0yemUcjSs= +github.com/pb33f/ordered-map/v2 v2.3.0 h1:k2OhVEQkhTCQMhAicQ3Z6iInzoZNQ7L9MVomwKBZ5WQ= +github.com/pb33f/ordered-map/v2 v2.3.0/go.mod h1:oe5ue+6ZNhy7QN9cPZvPA23Hx0vMHnNVeMg4fGdCANw= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o= @@ -491,6 +509,8 @@ github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPK github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ= +github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU= github.com/scaleway/scaleway-sdk-go v1.0.0-beta.36 h1:ObX9hZmK+VmijreZO/8x9pQ8/P/ToHD/bdSb4Eg4tUo= github.com/scaleway/scaleway-sdk-go v1.0.0-beta.36/go.mod h1:LEsDu4BubxK7/cWhtlQWfuxwL4rf/2UEpxXz1o1EMtM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= @@ -517,6 +537,7 @@ github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= @@ -533,6 +554,7 @@ github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8 github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= github.com/yuin/goldmark v1.1.27/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.mongodb.org/mongo-driver v1.17.6 h1:87JUG1wZfWsr6rIz3ZmpH90rL5tea7O3IHuSwHUpsss= go.mongodb.org/mongo-driver v1.17.6/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= @@ -620,12 +642,16 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU= golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0= golang.org/x/exp v0.0.0-20250808145144-a408d31f581a h1:Y+7uR/b1Mw2iSXZ3G//1haIiSElDQZ8KWh0h+sZPG90= golang.org/x/exp v0.0.0-20250808145144-a408d31f581a/go.mod h1:rT6SFzZ7oxADUDx58pcaKFTcZ+inxAa9fTrYx/uVYwg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk= golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -638,6 +664,10 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw= @@ -648,6 +678,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -667,23 +699,37 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk= golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q= golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= 
+golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU= golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY= golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI= @@ -694,6 +740,8 @@ golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ= golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ= golang.org/x/tools/godoc v0.1.0-deprecated h1:o+aZ1BOj6Hsx/GBdJO/s815sqftjSnrZZwyYTHODvtk= diff --git a/go.work b/go.work index fbb73655e9..c5ba5dfad6 100644 --- a/go.work +++ b/go.work @@ -1,4 +1,4 @@ -go 1.24.0 +go 1.25.0 use ( . diff --git a/internal/tools/go.mod b/internal/tools/go.mod index c8b62b5ca7..5238fca024 100644 --- a/internal/tools/go.mod +++ b/internal/tools/go.mod @@ -1,6 +1,6 @@ module github.com/prometheus/prometheus/internal/tools -go 1.24.0 +go 1.25.0 require ( github.com/bufbuild/buf v1.62.1 diff --git a/web/api/testhelpers/api.go b/web/api/testhelpers/api.go new file mode 100644 index 0000000000..07d7003b5c --- /dev/null +++ b/web/api/testhelpers/api.go @@ -0,0 +1,244 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package testhelpers provides utilities for testing the Prometheus HTTP API. +// This file contains helper functions for creating test API instances and managing test lifecycles. +package testhelpers + +import ( + "context" + "log/slog" + "net/http" + "net/url" + "testing" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/common/promslog" + + "github.com/prometheus/prometheus/config" + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/promql" + "github.com/prometheus/prometheus/promql/promqltest" + "github.com/prometheus/prometheus/rules" + "github.com/prometheus/prometheus/scrape" + "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb" + "github.com/prometheus/prometheus/util/notifications" +) + +// RulesRetriever provides a list of active rules and alerts. +type RulesRetriever interface { + RuleGroups() []*rules.Group + AlertingRules() []*rules.AlertingRule +} + +// TargetRetriever provides the list of active/dropped targets to scrape or not. 
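+// It mirrors the TargetRetriever interface expected by web/api/v1, so the
+// fakes in mocks.go can stand in for the real scrape manager.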
+type TargetRetriever interface { + TargetsActive() map[string][]*scrape.Target + TargetsDropped() map[string][]*scrape.Target + TargetsDroppedCounts() map[string]int + ScrapePoolConfig(string) (*config.ScrapeConfig, error) +} + +// ScrapePoolsRetriever provide the list of all scrape pools. +type ScrapePoolsRetriever interface { + ScrapePools() []string +} + +// AlertmanagerRetriever provides a list of all/dropped AlertManager URLs. +type AlertmanagerRetriever interface { + Alertmanagers() []*url.URL + DroppedAlertmanagers() []*url.URL +} + +// TSDBAdminStats provides TSDB admin statistics. +type TSDBAdminStats interface { + CleanTombstones() error + Delete(ctx context.Context, mint, maxt int64, ms ...*labels.Matcher) error + Snapshot(dir string, withHead bool) error + Stats(statsByLabelName string, limit int) (*tsdb.Stats, error) + WALReplayStatus() (tsdb.WALReplayStatus, error) + BlockMetas() ([]tsdb.BlockMeta, error) +} + +// APIConfig holds configuration for creating a test API instance. +type APIConfig struct { + // Core dependencies. + QueryEngine *LazyLoader[promql.QueryEngine] + Queryable *LazyLoader[storage.SampleAndChunkQueryable] + ExemplarQueryable *LazyLoader[storage.ExemplarQueryable] + + // Retrievers. + RulesRetriever *LazyLoader[RulesRetriever] + TargetRetriever *LazyLoader[TargetRetriever] + ScrapePoolsRetriever *LazyLoader[ScrapePoolsRetriever] + AlertmanagerRetriever *LazyLoader[AlertmanagerRetriever] + + // Admin. + TSDBAdmin *LazyLoader[TSDBAdminStats] + DBDir string + + // Optional overrides. + Config func() config.Config + FlagsMap map[string]string + Now func() time.Time +} + +// APIWrapper wraps the API and provides a handler for testing. +type APIWrapper struct { + Handler http.Handler +} + +// PrometheusVersion contains build information about Prometheus. +type PrometheusVersion struct { + Version string `json:"version"` + Revision string `json:"revision"` + Branch string `json:"branch"` + BuildUser string `json:"buildUser"` + BuildDate string `json:"buildDate"` + GoVersion string `json:"goVersion"` +} + +// RuntimeInfo contains runtime information about Prometheus. +type RuntimeInfo struct { + StartTime time.Time `json:"startTime"` + CWD string `json:"CWD"` + Hostname string `json:"hostname"` + ServerTime time.Time `json:"serverTime"` + ReloadConfigSuccess bool `json:"reloadConfigSuccess"` + LastConfigTime time.Time `json:"lastConfigTime"` + CorruptionCount int64 `json:"corruptionCount"` + GoroutineCount int `json:"goroutineCount"` + GOMAXPROCS int `json:"GOMAXPROCS"` + GOMEMLIMIT int64 `json:"GOMEMLIMIT"` + GOGC string `json:"GOGC"` + GODEBUG string `json:"GODEBUG"` + StorageRetention string `json:"storageRetention"` +} + +// NewAPIParams holds all the parameters needed to create a v1.API instance. 
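+// PrepareAPI returns one of these populated with test defaults, so individual
+// tests only override the fields they care about via APIConfig.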
+type NewAPIParams struct { + QueryEngine promql.QueryEngine + Queryable storage.SampleAndChunkQueryable + ExemplarQueryable storage.ExemplarQueryable + ScrapePoolsRetriever func(context.Context) ScrapePoolsRetriever + TargetRetriever func(context.Context) TargetRetriever + AlertmanagerRetriever func(context.Context) AlertmanagerRetriever + ConfigFunc func() config.Config + FlagsMap map[string]string + ReadyFunc func(http.HandlerFunc) http.HandlerFunc + TSDBAdmin TSDBAdminStats + DBDir string + Logger *slog.Logger + RulesRetriever func(context.Context) RulesRetriever + RuntimeInfoFunc func() (RuntimeInfo, error) + BuildInfo *PrometheusVersion + NotificationsGetter func() []notifications.Notification + NotificationsSub func() (<-chan notifications.Notification, func(), bool) + Gatherer prometheus.Gatherer + Registerer prometheus.Registerer +} + +// PrepareAPI creates a NewAPIParams with sensible defaults for testing. +func PrepareAPI(t *testing.T, cfg APIConfig) NewAPIParams { + t.Helper() + + // Create defaults for unset lazy loaders. + if cfg.QueryEngine == nil { + cfg.QueryEngine = NewLazyLoader(func() promql.QueryEngine { + return promqltest.NewTestEngineWithOpts(t, promql.EngineOpts{ + Logger: nil, + Reg: nil, + MaxSamples: 10000, + Timeout: 100 * time.Second, + NoStepSubqueryIntervalFn: func(int64) int64 { return 60 * 1000 }, + EnableAtModifier: true, + EnableNegativeOffset: true, + EnablePerStepStats: true, + }) + }) + } + + if cfg.Queryable == nil { + cfg.Queryable = NewLazyLoader(NewEmptyQueryable) + } + + if cfg.ExemplarQueryable == nil { + cfg.ExemplarQueryable = NewLazyLoader(NewEmptyExemplarQueryable) + } + + if cfg.RulesRetriever == nil { + cfg.RulesRetriever = NewLazyLoader(func() RulesRetriever { + return NewEmptyRulesRetriever() + }) + } + + if cfg.TargetRetriever == nil { + cfg.TargetRetriever = NewLazyLoader(func() TargetRetriever { + return NewEmptyTargetRetriever() + }) + } + + if cfg.ScrapePoolsRetriever == nil { + cfg.ScrapePoolsRetriever = NewLazyLoader(func() ScrapePoolsRetriever { + return NewEmptyScrapePoolsRetriever() + }) + } + + if cfg.AlertmanagerRetriever == nil { + cfg.AlertmanagerRetriever = NewLazyLoader(func() AlertmanagerRetriever { + return NewEmptyAlertmanagerRetriever() + }) + } + + if cfg.TSDBAdmin == nil { + cfg.TSDBAdmin = NewLazyLoader(func() TSDBAdminStats { + return NewEmptyTSDBAdminStats() + }) + } + + if cfg.Config == nil { + cfg.Config = func() config.Config { return config.Config{} } + } + + if cfg.FlagsMap == nil { + cfg.FlagsMap = map[string]string{} + } + + if cfg.DBDir == "" { + cfg.DBDir = t.TempDir() + } + + return NewAPIParams{ + QueryEngine: cfg.QueryEngine.Get(), + Queryable: cfg.Queryable.Get(), + ExemplarQueryable: cfg.ExemplarQueryable.Get(), + ScrapePoolsRetriever: func(context.Context) ScrapePoolsRetriever { return cfg.ScrapePoolsRetriever.Get() }, + TargetRetriever: func(context.Context) TargetRetriever { return cfg.TargetRetriever.Get() }, + AlertmanagerRetriever: func(context.Context) AlertmanagerRetriever { return cfg.AlertmanagerRetriever.Get() }, + ConfigFunc: cfg.Config, + FlagsMap: cfg.FlagsMap, + ReadyFunc: func(f http.HandlerFunc) http.HandlerFunc { return f }, + TSDBAdmin: cfg.TSDBAdmin.Get(), + DBDir: cfg.DBDir, + Logger: promslog.NewNopLogger(), + RulesRetriever: func(context.Context) RulesRetriever { return cfg.RulesRetriever.Get() }, + RuntimeInfoFunc: func() (RuntimeInfo, error) { return RuntimeInfo{}, nil }, + BuildInfo: &PrometheusVersion{}, + NotificationsGetter: func() []notifications.Notification { return 
nil }, + NotificationsSub: func() (<-chan notifications.Notification, func(), bool) { return nil, func() {}, false }, + Gatherer: prometheus.NewRegistry(), + Registerer: prometheus.NewRegistry(), + } +} diff --git a/web/api/testhelpers/assertions.go b/web/api/testhelpers/assertions.go new file mode 100644 index 0000000000..53010b08b5 --- /dev/null +++ b/web/api/testhelpers/assertions.go @@ -0,0 +1,252 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file provides assertion helpers for validating API responses in tests. +package testhelpers + +import ( + "fmt" + "slices" + "strings" + + "github.com/stretchr/testify/require" +) + +// RequireSuccess asserts that the response has status "success" and returns the response for chaining. +func (r *Response) RequireSuccess() *Response { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + require.Equal(r.t, "success", r.JSON["status"], "expected status to be 'success'") + return r +} + +// RequireError asserts that the response has status "error" and returns the response for chaining. +func (r *Response) RequireError() *Response { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + require.Equal(r.t, "error", r.JSON["status"], "expected status to be 'error'") + return r +} + +// RequireStatusCode asserts that the response has the given HTTP status code and returns the response for chaining. +func (r *Response) RequireStatusCode(expectedCode int) *Response { + r.t.Helper() + require.Equal(r.t, expectedCode, r.StatusCode, "unexpected HTTP status code") + return r +} + +// RequireJSONPathExists asserts that a JSON path exists and returns the response for chaining. +func (r *Response) RequireJSONPathExists(path string) *Response { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + + value := getJSONPath(r.JSON, path) + require.NotNil(r.t, value, "JSON path %q does not exist", path) + return r +} + +// RequireEquals asserts that a JSON path equals the expected value and returns the response for chaining. +func (r *Response) RequireEquals(path string, expected any) *Response { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + + value := getJSONPath(r.JSON, path) + require.NotNil(r.t, value, "JSON path %q does not exist", path) + require.Equal(r.t, expected, value, "JSON path %q has unexpected value", path) + return r +} + +// RequireJSONArray asserts that a JSON path contains an array and returns the response for chaining. +func (r *Response) RequireJSONArray(path string) *Response { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + + value := getJSONPath(r.JSON, path) + require.NotNil(r.t, value, "JSON path %q does not exist", path) + _, ok := value.([]any) + require.True(r.t, ok, "JSON path %q is not an array", path) + return r +} + +// RequireLenAtLeast asserts that a JSON path contains an array with at least minLen elements and returns the response for chaining. 
+func (r *Response) RequireLenAtLeast(path string, minLen int) *Response { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + + value := getJSONPath(r.JSON, path) + require.NotNil(r.t, value, "JSON path %q does not exist", path) + arr, ok := value.([]any) + require.True(r.t, ok, "JSON path %q is not an array", path) + require.GreaterOrEqual(r.t, len(arr), minLen, "JSON path %q has fewer than %d elements", path, minLen) + return r +} + +// RequireArrayContains asserts that a JSON path contains an array with the expected element and returns the response for chaining. +func (r *Response) RequireArrayContains(path string, expected any) *Response { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + + value := getJSONPath(r.JSON, path) + require.NotNil(r.t, value, "JSON path %q does not exist", path) + arr, ok := value.([]any) + require.True(r.t, ok, "JSON path %q is not an array", path) + + found := slices.Contains(arr, expected) + require.True(r.t, found, "JSON path %q does not contain expected value %v", path, expected) + return r +} + +// RequireSome asserts that at least one element in an array satisfies the predicate and returns the response for chaining. +func (r *Response) RequireSome(path string, predicate func(any) bool) *Response { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + + value := getJSONPath(r.JSON, path) + require.NotNil(r.t, value, "JSON path %q does not exist", path) + arr, ok := value.([]any) + require.True(r.t, ok, "JSON path %q is not an array", path) + + found := slices.ContainsFunc(arr, predicate) + require.True(r.t, found, "no element in JSON path %q satisfies the predicate", path) + return r +} + +// getJSONPath extracts a value from a JSON object using a simple path notation. +// Supports paths like "$.data", "$.data.groups", "$.data.groups[0]". +func getJSONPath(data map[string]any, path string) any { + // Remove leading "$." if present. + path = strings.TrimPrefix(path, "$.") + + if path == "" { + return data + } + + parts := strings.Split(path, ".") + current := any(data) + + for _, part := range parts { + // Handle array indexing (e.g., "groups[0]"). + if strings.Contains(part, "[") { + // Not implementing array indexing for simplicity. + // Tests should use direct field access or RequireSome. + return nil + } + + // Navigate to the next level. + m, ok := current.(map[string]any) + if !ok { + return nil + } + current = m[part] + } + + return current +} + +// RequireVectorResult is a convenience helper for checking vector query results. +func (r *Response) RequireVectorResult() *Response { + r.t.Helper() + return r.RequireSuccess().RequireEquals("$.data.resultType", "vector") +} + +// RequireMatrixResult is a convenience helper for checking matrix query results. +func (r *Response) RequireMatrixResult() *Response { + r.t.Helper() + return r.RequireSuccess().RequireEquals("$.data.resultType", "matrix") +} + +// RequireScalarResult is a convenience helper for checking scalar query results. +func (r *Response) RequireScalarResult() *Response { + r.t.Helper() + return r.RequireSuccess().RequireEquals("$.data.resultType", "scalar") +} + +// RequireRulesGroupNamed asserts that a rules response contains a group with the given name. 
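+// Illustrative only (res is assumed to be a *Response produced by the request
+// helpers in this package):
+//
+//	res.RequireStatusCode(200).RequireRulesGroupNamed("example")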
+func (r *Response) RequireRulesGroupNamed(name string) *Response { + r.t.Helper() + return r.RequireSuccess().RequireSome("$.data.groups", func(group any) bool { + if g, ok := group.(map[string]any); ok { + return g["name"] == name + } + return false + }) +} + +// RequireTargetCount asserts that a targets response contains at least n targets. +func (r *Response) RequireTargetCount(minCount int) *Response { + r.t.Helper() + r.RequireSuccess() + + // The targets endpoint returns activeTargets as an array of targets. + value := getJSONPath(r.JSON, "$.data.activeTargets") + require.NotNil(r.t, value, "JSON path $.data.activeTargets does not exist") + + arr, ok := value.([]any) + require.True(r.t, ok, "$.data.activeTargets is not an array") + require.GreaterOrEqual(r.t, len(arr), minCount, "expected at least %d targets, got %d", minCount, len(arr)) + return r +} + +// DebugJSON is a helper for debugging JSON responses in tests. +func (r *Response) DebugJSON() *Response { + r.t.Helper() + r.t.Logf("Response status code: %d", r.StatusCode) + r.t.Logf("Response body: %s", r.Body) + if r.JSON != nil { + r.t.Logf("Response JSON: %+v", r.JSON) + } + return r +} + +// RequireContainsSubstring asserts that the response body contains the given substring. +func (r *Response) RequireContainsSubstring(substring string) *Response { + r.t.Helper() + require.Contains(r.t, r.Body, substring, "response body does not contain expected substring") + return r +} + +// RequireField asserts that a field exists at the given path and returns its value. +// Note: This method cannot be chained further since it returns the field value, not the Response. +func (r *Response) RequireField(path string) any { + r.t.Helper() + require.NotNil(r.t, r.JSON, "response body is not JSON") + + value := getJSONPath(r.JSON, path) + require.NotNil(r.t, value, "JSON path %q does not exist", path) + return value +} + +// RequireFieldType asserts that a field exists and has the expected type. +func (r *Response) RequireFieldType(path, expectedType string) *Response { + r.t.Helper() + value := r.RequireField(path) + + var actualType string + switch value.(type) { + case string: + actualType = "string" + case float64: + actualType = "number" + case bool: + actualType = "bool" + case []any: + actualType = "array" + case map[string]any: + actualType = "object" + default: + actualType = fmt.Sprintf("%T", value) + } + + require.Equal(r.t, expectedType, actualType, "JSON path %q has unexpected type", path) + return r +} diff --git a/web/api/testhelpers/fixtures.go b/web/api/testhelpers/fixtures.go new file mode 100644 index 0000000000..caa5afd59d --- /dev/null +++ b/web/api/testhelpers/fixtures.go @@ -0,0 +1,178 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file provides test fixture data for API tests. 
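+// The fixtures pair with the fakes in mocks.go; for example, a queryable
+// pre-populated with the "up" series can be built with
+// NewQueryableWithSeries(FixtureSeries()).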
+package testhelpers + +import ( + "time" + + "github.com/prometheus/prometheus/model/histogram" + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/promql" + "github.com/prometheus/prometheus/promql/parser" + "github.com/prometheus/prometheus/rules" + "github.com/prometheus/prometheus/storage" +) + +// FixtureSeries creates a simple series with the "up" metric. +func FixtureSeries() []storage.Series { + // Use timestamps relative to "now" so queries work. + now := time.Now().UnixMilli() + return []storage.Series{ + &FakeSeries{ + labels: labels.FromStrings("__name__", "up", "job", "prometheus", "instance", "localhost:9090"), + samples: []promql.FPoint{ + {T: now - 120000, F: 1}, + {T: now - 60000, F: 1}, + {T: now, F: 1}, + }, + }, + } +} + +// FixtureMultipleSeries creates multiple series for testing. +func FixtureMultipleSeries() []storage.Series { + // Use timestamps relative to "now" so queries work. + now := time.Now().UnixMilli() + return []storage.Series{ + &FakeSeries{ + labels: labels.FromStrings("__name__", "up", "job", "prometheus", "instance", "localhost:9090"), + samples: []promql.FPoint{ + {T: now - 60000, F: 1}, + {T: now, F: 1}, + }, + }, + &FakeSeries{ + labels: labels.FromStrings("__name__", "up", "job", "node", "instance", "localhost:9100"), + samples: []promql.FPoint{ + {T: now - 60000, F: 1}, + {T: now, F: 0}, + }, + }, + &FakeSeries{ + labels: labels.FromStrings("__name__", "http_requests_total", "job", "api", "instance", "localhost:8080"), + samples: []promql.FPoint{ + {T: now - 60000, F: 100}, + {T: now, F: 150}, + }, + }, + } +} + +// FixtureRuleGroups creates a simple set of rule groups for testing. +func FixtureRuleGroups() []*rules.Group { + // Create a simple recording rule. + expr, _ := parser.ParseExpr("up == 1") + recordingRule := rules.NewRecordingRule( + "job:up:sum", + expr, + labels.EmptyLabels(), + ) + + // Create a simple alerting rule. + alertExpr, _ := parser.ParseExpr("up == 0") + alertingRule := rules.NewAlertingRule( + "InstanceDown", + alertExpr, + time.Minute, + 0, + labels.FromStrings("severity", "critical"), + labels.EmptyLabels(), + labels.EmptyLabels(), + "Instance {{ $labels.instance }} is down", + true, + nil, + ) + + // Create a rule group. + group := rules.NewGroup(rules.GroupOptions{ + Name: "example", + File: "example.rules", + Interval: time.Minute, + Rules: []rules.Rule{ + recordingRule, + alertingRule, + }, + }) + + return []*rules.Group{group} +} + +// FixtureEmptyRuleGroups returns an empty set of rule groups. +func FixtureEmptyRuleGroups() []*rules.Group { + return []*rules.Group{} +} + +// FixtureSingleSeries creates a single series for simple tests. +func FixtureSingleSeries(metricName string, value float64) []storage.Series { + return []storage.Series{ + &FakeSeries{ + labels: labels.FromStrings("__name__", metricName), + samples: []promql.FPoint{ + {T: 0, F: value}, + }, + }, + } +} + +// FixtureHistogramSeries creates a series with native histogram data. +func FixtureHistogramSeries() []storage.Series { + // Use timestamps relative to "now" so queries work. 
+ now := time.Now().UnixMilli() + return []storage.Series{ + &FakeHistogramSeries{ + labels: labels.FromStrings("__name__", "test_histogram", "job", "prometheus", "instance", "localhost:9090"), + histograms: []promql.HPoint{ + { + T: now - 60000, + H: &histogram.FloatHistogram{ + Schema: 2, + ZeroThreshold: 0.001, + ZeroCount: 5, + Count: 50, + Sum: 100, + PositiveSpans: []histogram.Span{ + {Offset: 0, Length: 2}, + {Offset: 1, Length: 2}, + }, + NegativeSpans: []histogram.Span{ + {Offset: 0, Length: 1}, + }, + PositiveBuckets: []float64{5, 10, 8, 7}, + NegativeBuckets: []float64{3}, + }, + }, + { + T: now, + H: &histogram.FloatHistogram{ + Schema: 2, + ZeroThreshold: 0.001, + ZeroCount: 8, + Count: 60, + Sum: 120, + PositiveSpans: []histogram.Span{ + {Offset: 0, Length: 2}, + {Offset: 1, Length: 2}, + }, + NegativeSpans: []histogram.Span{ + {Offset: 0, Length: 1}, + }, + PositiveBuckets: []float64{6, 12, 10, 9}, + NegativeBuckets: []float64{4}, + }, + }, + }, + }, + } +} diff --git a/web/api/testhelpers/mocks.go b/web/api/testhelpers/mocks.go new file mode 100644 index 0000000000..527febb727 --- /dev/null +++ b/web/api/testhelpers/mocks.go @@ -0,0 +1,534 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file contains mock implementations of API dependencies for testing. +package testhelpers + +import ( + "context" + "net/url" + + "github.com/prometheus/prometheus/config" + "github.com/prometheus/prometheus/model/exemplar" + "github.com/prometheus/prometheus/model/histogram" + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/promql" + "github.com/prometheus/prometheus/rules" + "github.com/prometheus/prometheus/scrape" + "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb" + "github.com/prometheus/prometheus/tsdb/chunkenc" + "github.com/prometheus/prometheus/tsdb/chunks" + "github.com/prometheus/prometheus/util/annotations" +) + +// LazyLoader allows lazy initialization of mocks per test. +type LazyLoader[T any] struct { + loader func() T + value *T +} + +// NewLazyLoader creates a new LazyLoader with the given loader function. +func NewLazyLoader[T any](loader func() T) *LazyLoader[T] { + return &LazyLoader[T]{loader: loader} +} + +// Get returns the loaded value, initializing it if necessary. +func (l *LazyLoader[T]) Get() T { + if l.value == nil { + v := l.loader() + l.value = &v + } + return *l.value +} + +// FakeQueryable implements storage.SampleAndChunkQueryable with configurable behavior. +type FakeQueryable struct { + series []storage.Series +} + +func (f *FakeQueryable) Querier(_, _ int64) (storage.Querier, error) { + return &FakeQuerier{series: f.series}, nil +} + +func (f *FakeQueryable) ChunkQuerier(_, _ int64) (storage.ChunkQuerier, error) { + return &FakeChunkQuerier{series: f.series}, nil +} + +// FakeQuerier implements storage.Querier. 
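+// Select ignores hints and matchers and returns every configured series;
+// LabelNames and LabelValues are derived from those series' label sets.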
+type FakeQuerier struct { + series []storage.Series +} + +func (f *FakeQuerier) Select(_ context.Context, _ bool, _ *storage.SelectHints, _ ...*labels.Matcher) storage.SeriesSet { + return &FakeSeriesSet{series: f.series, idx: -1} +} + +func (f *FakeQuerier) LabelValues(_ context.Context, name string, _ *storage.LabelHints, _ ...*labels.Matcher) ([]string, annotations.Annotations, error) { + valuesMap := make(map[string]struct{}) + for _, s := range f.series { + lbls := s.Labels() + if val := lbls.Get(name); val != "" { + valuesMap[val] = struct{}{} + } + } + values := make([]string, 0, len(valuesMap)) + for v := range valuesMap { + values = append(values, v) + } + return values, nil, nil +} + +func (f *FakeQuerier) LabelNames(_ context.Context, _ *storage.LabelHints, _ ...*labels.Matcher) ([]string, annotations.Annotations, error) { + namesMap := make(map[string]struct{}) + for _, s := range f.series { + lbls := s.Labels() + lbls.Range(func(l labels.Label) { + namesMap[l.Name] = struct{}{} + }) + } + names := make([]string, 0, len(namesMap)) + for n := range namesMap { + names = append(names, n) + } + return names, nil, nil +} + +func (*FakeQuerier) Close() error { + return nil +} + +// FakeChunkQuerier implements storage.ChunkQuerier. +type FakeChunkQuerier struct { + series []storage.Series +} + +func (f *FakeChunkQuerier) Select(_ context.Context, _ bool, _ *storage.SelectHints, _ ...*labels.Matcher) storage.ChunkSeriesSet { + return &FakeChunkSeriesSet{series: f.series, idx: -1} +} + +func (f *FakeChunkQuerier) LabelValues(_ context.Context, name string, _ *storage.LabelHints, _ ...*labels.Matcher) ([]string, annotations.Annotations, error) { + valuesMap := make(map[string]struct{}) + for _, s := range f.series { + lbls := s.Labels() + if val := lbls.Get(name); val != "" { + valuesMap[val] = struct{}{} + } + } + values := make([]string, 0, len(valuesMap)) + for v := range valuesMap { + values = append(values, v) + } + return values, nil, nil +} + +func (f *FakeChunkQuerier) LabelNames(_ context.Context, _ *storage.LabelHints, _ ...*labels.Matcher) ([]string, annotations.Annotations, error) { + namesMap := make(map[string]struct{}) + for _, s := range f.series { + lbls := s.Labels() + lbls.Range(func(l labels.Label) { + namesMap[l.Name] = struct{}{} + }) + } + names := make([]string, 0, len(namesMap)) + for n := range namesMap { + names = append(names, n) + } + return names, nil, nil +} + +func (*FakeChunkQuerier) Close() error { + return nil +} + +// FakeSeriesSet implements storage.SeriesSet. +type FakeSeriesSet struct { + series []storage.Series + idx int +} + +func (f *FakeSeriesSet) Next() bool { + f.idx++ + return f.idx < len(f.series) +} + +func (f *FakeSeriesSet) At() storage.Series { + return f.series[f.idx] +} + +func (*FakeSeriesSet) Err() error { + return nil +} + +func (*FakeSeriesSet) Warnings() annotations.Annotations { + return nil +} + +// FakeChunkSeriesSet implements storage.ChunkSeriesSet. +type FakeChunkSeriesSet struct { + series []storage.Series + idx int +} + +func (f *FakeChunkSeriesSet) Next() bool { + f.idx++ + return f.idx < len(f.series) +} + +func (f *FakeChunkSeriesSet) At() storage.ChunkSeries { + return &FakeChunkSeries{series: f.series[f.idx]} +} + +func (*FakeChunkSeriesSet) Err() error { + return nil +} + +func (*FakeChunkSeriesSet) Warnings() annotations.Annotations { + return nil +} + +// FakeChunkSeries implements storage.ChunkSeries. 
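+// It exposes the wrapped series' labels, but its Iterator yields no chunks.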
+type FakeChunkSeries struct { + series storage.Series +} + +func (f *FakeChunkSeries) Labels() labels.Labels { + return f.series.Labels() +} + +func (*FakeChunkSeries) Iterator(_ chunks.Iterator) chunks.Iterator { + return &FakeChunkSeriesIterator{} +} + +// FakeChunkSeriesIterator implements chunks.Iterator. +type FakeChunkSeriesIterator struct{} + +func (*FakeChunkSeriesIterator) Next() bool { + return false +} + +func (*FakeChunkSeriesIterator) At() chunks.Meta { + return chunks.Meta{} +} + +func (*FakeChunkSeriesIterator) Err() error { + return nil +} + +// FakeSeries implements storage.Series. +type FakeSeries struct { + labels labels.Labels + samples []promql.FPoint +} + +func (f *FakeSeries) Labels() labels.Labels { + return f.labels +} + +func (f *FakeSeries) Iterator(chunkenc.Iterator) chunkenc.Iterator { + return &FakeSeriesIterator{samples: f.samples, idx: -1} +} + +// FakeSeriesIterator implements chunkenc.Iterator. +type FakeSeriesIterator struct { + samples []promql.FPoint + idx int +} + +func (f *FakeSeriesIterator) Next() chunkenc.ValueType { + f.idx++ + if f.idx < len(f.samples) { + return chunkenc.ValFloat + } + return chunkenc.ValNone +} + +func (f *FakeSeriesIterator) Seek(t int64) chunkenc.ValueType { + for f.idx < len(f.samples)-1 { + f.idx++ + if f.samples[f.idx].T >= t { + return chunkenc.ValFloat + } + } + return chunkenc.ValNone +} + +func (f *FakeSeriesIterator) At() (int64, float64) { + s := f.samples[f.idx] + return s.T, s.F +} + +func (*FakeSeriesIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogram) { + panic("not implemented") +} + +func (*FakeSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *histogram.FloatHistogram) { + panic("not implemented") +} + +func (f *FakeSeriesIterator) AtT() int64 { + return f.samples[f.idx].T +} + +func (*FakeSeriesIterator) AtST() int64 { + return 0 +} + +func (*FakeSeriesIterator) Err() error { + return nil +} + +// FakeHistogramSeries implements storage.Series for histogram data. +type FakeHistogramSeries struct { + labels labels.Labels + histograms []promql.HPoint +} + +func (f *FakeHistogramSeries) Labels() labels.Labels { + return f.labels +} + +func (f *FakeHistogramSeries) Iterator(chunkenc.Iterator) chunkenc.Iterator { + return &FakeHistogramSeriesIterator{histograms: f.histograms, idx: -1} +} + +// FakeHistogramSeriesIterator implements chunkenc.Iterator for histogram data. 
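+// Only float histogram samples are supported: At and AtHistogram panic, so
+// callers must branch on the ValueType returned by Next or Seek.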
+type FakeHistogramSeriesIterator struct { + histograms []promql.HPoint + idx int +} + +func (f *FakeHistogramSeriesIterator) Next() chunkenc.ValueType { + f.idx++ + if f.idx < len(f.histograms) { + return chunkenc.ValFloatHistogram + } + return chunkenc.ValNone +} + +func (f *FakeHistogramSeriesIterator) Seek(t int64) chunkenc.ValueType { + for f.idx < len(f.histograms)-1 { + f.idx++ + if f.histograms[f.idx].T >= t { + return chunkenc.ValFloatHistogram + } + } + return chunkenc.ValNone +} + +func (*FakeHistogramSeriesIterator) At() (int64, float64) { + panic("not a float value") +} + +func (*FakeHistogramSeriesIterator) AtHistogram(*histogram.Histogram) (int64, *histogram.Histogram) { + panic("not implemented") +} + +func (f *FakeHistogramSeriesIterator) AtFloatHistogram(*histogram.FloatHistogram) (int64, *histogram.FloatHistogram) { + h := f.histograms[f.idx] + return h.T, h.H +} + +func (f *FakeHistogramSeriesIterator) AtT() int64 { + return f.histograms[f.idx].T +} + +func (*FakeHistogramSeriesIterator) AtST() int64 { + return 0 +} + +func (*FakeHistogramSeriesIterator) Err() error { + return nil +} + +// FakeExemplarQueryable implements storage.ExemplarQueryable. +type FakeExemplarQueryable struct{} + +func (*FakeExemplarQueryable) ExemplarQuerier(_ context.Context) (storage.ExemplarQuerier, error) { + return &FakeExemplarQuerier{}, nil +} + +// FakeExemplarQuerier implements storage.ExemplarQuerier. +type FakeExemplarQuerier struct{} + +func (*FakeExemplarQuerier) Select(_, _ int64, _ ...[]*labels.Matcher) ([]exemplar.QueryResult, error) { + return nil, nil +} + +// FakeRulesRetriever implements v1.RulesRetriever. +type FakeRulesRetriever struct { + groups []*rules.Group +} + +func (f *FakeRulesRetriever) RuleGroups() []*rules.Group { + return f.groups +} + +func (f *FakeRulesRetriever) AlertingRules() []*rules.AlertingRule { + var alertingRules []*rules.AlertingRule + for _, g := range f.groups { + for _, r := range g.Rules() { + if ar, ok := r.(*rules.AlertingRule); ok { + alertingRules = append(alertingRules, ar) + } + } + } + return alertingRules +} + +// FakeTargetRetriever implements v1.TargetRetriever. +type FakeTargetRetriever struct { + active map[string][]*scrape.Target + dropped map[string][]*scrape.Target + droppedCounts map[string]int + scrapeConfig map[string]*config.ScrapeConfig +} + +func (f *FakeTargetRetriever) TargetsActive() map[string][]*scrape.Target { + if f.active == nil { + return make(map[string][]*scrape.Target) + } + return f.active +} + +func (f *FakeTargetRetriever) TargetsDropped() map[string][]*scrape.Target { + if f.dropped == nil { + return make(map[string][]*scrape.Target) + } + return f.dropped +} + +func (f *FakeTargetRetriever) TargetsDroppedCounts() map[string]int { + if f.droppedCounts == nil { + return make(map[string]int) + } + return f.droppedCounts +} + +func (f *FakeTargetRetriever) ScrapePoolConfig(name string) (*config.ScrapeConfig, error) { + if f.scrapeConfig == nil { + return nil, nil + } + return f.scrapeConfig[name], nil +} + +// FakeScrapePoolsRetriever implements v1.ScrapePoolsRetriever. +type FakeScrapePoolsRetriever struct { + pools []string +} + +func (f *FakeScrapePoolsRetriever) ScrapePools() []string { + if f.pools == nil { + return []string{} + } + return f.pools +} + +// FakeAlertmanagerRetriever implements v1.AlertmanagerRetriever. 
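+// Both Alertmanagers and DroppedAlertmanagers return nil, modelling a server
+// with no Alertmanagers configured.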
+type FakeAlertmanagerRetriever struct{} + +func (*FakeAlertmanagerRetriever) Alertmanagers() []*url.URL { + return nil +} + +func (*FakeAlertmanagerRetriever) DroppedAlertmanagers() []*url.URL { + return nil +} + +// FakeTSDBAdminStats implements v1.TSDBAdminStats. +type FakeTSDBAdminStats struct{} + +func (*FakeTSDBAdminStats) CleanTombstones() error { + return nil +} + +func (*FakeTSDBAdminStats) Delete(_ context.Context, _, _ int64, _ ...*labels.Matcher) error { + return nil +} + +func (*FakeTSDBAdminStats) Snapshot(_ string, _ bool) error { + return nil +} + +func (*FakeTSDBAdminStats) Stats(_ string, _ int) (*tsdb.Stats, error) { + return &tsdb.Stats{}, nil +} + +func (*FakeTSDBAdminStats) WALReplayStatus() (tsdb.WALReplayStatus, error) { + return tsdb.WALReplayStatus{}, nil +} + +func (*FakeTSDBAdminStats) BlockMetas() ([]tsdb.BlockMeta, error) { + return []tsdb.BlockMeta{}, nil +} + +// NewEmptyQueryable returns a queryable with no series. +func NewEmptyQueryable() storage.SampleAndChunkQueryable { + return &FakeQueryable{series: []storage.Series{}} +} + +// NewQueryableWithSeries returns a queryable with the given series. +func NewQueryableWithSeries(series []storage.Series) storage.SampleAndChunkQueryable { + return &FakeQueryable{series: series} +} + +// TSDBNotReadyQueryable implements storage.SampleAndChunkQueryable that returns tsdb.ErrNotReady. +type TSDBNotReadyQueryable struct{} + +func (*TSDBNotReadyQueryable) Querier(_, _ int64) (storage.Querier, error) { + return nil, tsdb.ErrNotReady +} + +func (*TSDBNotReadyQueryable) ChunkQuerier(_, _ int64) (storage.ChunkQuerier, error) { + return nil, tsdb.ErrNotReady +} + +// NewTSDBNotReadyQueryable returns a queryable that always returns tsdb.ErrNotReady. +func NewTSDBNotReadyQueryable() storage.SampleAndChunkQueryable { + return &TSDBNotReadyQueryable{} +} + +// NewEmptyExemplarQueryable returns an exemplar queryable with no exemplars. +func NewEmptyExemplarQueryable() storage.ExemplarQueryable { + return &FakeExemplarQueryable{} +} + +// NewEmptyRulesRetriever returns a rules retriever with no rules. +func NewEmptyRulesRetriever() *FakeRulesRetriever { + return &FakeRulesRetriever{groups: []*rules.Group{}} +} + +// NewRulesRetrieverWithGroups returns a rules retriever with the given groups. +func NewRulesRetrieverWithGroups(groups []*rules.Group) *FakeRulesRetriever { + return &FakeRulesRetriever{groups: groups} +} + +// NewEmptyTargetRetriever returns a target retriever with no targets. +func NewEmptyTargetRetriever() *FakeTargetRetriever { + return &FakeTargetRetriever{} +} + +// NewEmptyScrapePoolsRetriever returns a scrape pools retriever with no pools. +func NewEmptyScrapePoolsRetriever() *FakeScrapePoolsRetriever { + return &FakeScrapePoolsRetriever{pools: []string{}} +} + +// NewEmptyAlertmanagerRetriever returns an alertmanager retriever with no alertmanagers. +func NewEmptyAlertmanagerRetriever() *FakeAlertmanagerRetriever { + return &FakeAlertmanagerRetriever{} +} + +// NewEmptyTSDBAdminStats returns a TSDB admin stats with no-op implementations. +func NewEmptyTSDBAdminStats() *FakeTSDBAdminStats { + return &FakeTSDBAdminStats{} +} diff --git a/web/api/testhelpers/openapi.go b/web/api/testhelpers/openapi.go new file mode 100644 index 0000000000..d2e88943d2 --- /dev/null +++ b/web/api/testhelpers/openapi.go @@ -0,0 +1,204 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file provides OpenAPI-specific test utilities for validating spec compliance. +package testhelpers + +import ( + "bytes" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "strings" + "sync" + + "github.com/pb33f/libopenapi" + validator "github.com/pb33f/libopenapi-validator" + valerrors "github.com/pb33f/libopenapi-validator/errors" + "github.com/stretchr/testify/require" +) + +var ( + openAPIValidator31 validator.Validator + openAPIValidator32 validator.Validator + openAPIValidatorOnce sync.Once + openAPIValidatorErr error +) + +// loadOpenAPIValidators loads and caches both OpenAPI 3.1 and 3.2 validators from golden files. +func loadOpenAPIValidators() (v31, v32 validator.Validator, err error) { + openAPIValidatorOnce.Do(func() { + // Load OpenAPI 3.1 validator. + goldenPath31 := filepath.Join("testdata", "openapi_3.1_golden.yaml") + specBytes31, err := os.ReadFile(goldenPath31) + if err != nil { + openAPIValidatorErr = fmt.Errorf("failed to read OpenAPI 3.1 spec from %s: %w", goldenPath31, err) + return + } + + doc31, err := libopenapi.NewDocument(specBytes31) + if err != nil { + openAPIValidatorErr = fmt.Errorf("failed to parse OpenAPI 3.1 document: %w", err) + return + } + + v31, errs := validator.NewValidator(doc31) + if len(errs) > 0 { + openAPIValidatorErr = fmt.Errorf("failed to create OpenAPI 3.1 validator: %v", errs) + return + } + + openAPIValidator31 = v31 + + // Load OpenAPI 3.2 validator. + goldenPath32 := filepath.Join("testdata", "openapi_3.2_golden.yaml") + specBytes32, err := os.ReadFile(goldenPath32) + if err != nil { + openAPIValidatorErr = fmt.Errorf("failed to read OpenAPI 3.2 spec from %s: %w", goldenPath32, err) + return + } + + doc32, err := libopenapi.NewDocument(specBytes32) + if err != nil { + openAPIValidatorErr = fmt.Errorf("failed to parse OpenAPI 3.2 document: %w", err) + return + } + + v32, errs := validator.NewValidator(doc32) + if len(errs) > 0 { + openAPIValidatorErr = fmt.Errorf("failed to create OpenAPI 3.2 validator: %v", errs) + return + } + + openAPIValidator32 = v32 + }) + + if openAPIValidatorErr != nil { + return nil, nil, openAPIValidatorErr + } + + return openAPIValidator31, openAPIValidator32, nil +} + +// ValidateOpenAPI validates the request and response against both OpenAPI 3.1 and 3.2 specifications. +// This ensures API endpoints are compatible with both OpenAPI versions. +// Returns the response for chaining. +func (r *Response) ValidateOpenAPI() *Response { + r.t.Helper() + + // Load both validators (cached after first call). + v31, v32, err := loadOpenAPIValidators() + require.NoError(r.t, err, "failed to load OpenAPI validators") + + // Validate against OpenAPI 3.1 spec. + if r.request != nil { + r.validateRequestWithVersion(v31, "3.1") + } + r.validateResponseWithVersion(v31, "3.1") + + // Validate against OpenAPI 3.2 spec. + if r.request != nil { + r.validateRequestWithVersion(v32, "3.2") + } + r.validateResponseWithVersion(v32, "3.2") + + return r +} + +// validateRequestWithVersion validates the HTTP request against a specific OpenAPI version's spec. 
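+// The request is rebuilt from the captured method, URL, headers, and body so that
+// validation never consumes the state of the original request.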
+func (r *Response) validateRequestWithVersion(v validator.Validator, version string) { + r.t.Helper() + + // Create a validation request from the original request. + validationReq := &http.Request{ + Method: r.request.Method, + URL: r.request.URL, + Header: r.request.Header, + Body: io.NopCloser(bytes.NewReader(r.requestBody)), + } + + // Validate the request. + valid, errors := v.ValidateHttpRequest(validationReq) + if !valid { + // Check if the error is because the path doesn't exist in this version. + // Some endpoints (like /notifications/live) only exist in 3.2, not 3.1. + if isPathNotFoundError(errors) && version == "3.1" && strings.Contains(r.request.URL.Path, "/notifications/live") { + // Expected: /notifications/live is only in OpenAPI 3.2. + return + } + + var errorMessages []string + for _, e := range errors { + errorMessages = append(errorMessages, e.Error()) + } + require.Fail(r.t, fmt.Sprintf("OpenAPI %s request validation failed", version), + "Request to %s %s failed OpenAPI %s validation:\n%v", + r.request.Method, r.request.URL.Path, version, errorMessages) + } +} + +// validateResponseWithVersion validates the HTTP response against a specific OpenAPI version's spec. +func (r *Response) validateResponseWithVersion(v validator.Validator, version string) { + r.t.Helper() + + // Create a validation request (needed for response validation context). + validationReq := &http.Request{ + Method: r.request.Method, + URL: r.request.URL, + Header: r.request.Header, + } + + // Create a response for validation. + validationResp := &http.Response{ + StatusCode: r.StatusCode, + Header: r.responseHeader, + Body: io.NopCloser(bytes.NewReader([]byte(r.Body))), + Request: validationReq, + } + + // Validate the response. + valid, errors := v.ValidateHttpResponse(validationReq, validationResp) + if !valid { + // Check if the error is because the path doesn't exist in this version. + // Some endpoints (like /notifications/live) only exist in 3.2, not 3.1. + if isPathNotFoundError(errors) && version == "3.1" && strings.Contains(r.request.URL.Path, "/notifications/live") { + // Expected: /notifications/live is only in OpenAPI 3.2. + return + } + + var errorMessages []string + for _, e := range errors { + errorMessages = append(errorMessages, e.Error()) + } + require.Fail(r.t, fmt.Sprintf("OpenAPI %s response validation failed", version), + "Response from %s %s (status %d) failed OpenAPI %s validation:\n%v", + r.request.Method, r.request.URL.Path, r.StatusCode, version, errorMessages) + } +} + +// isPathNotFoundError checks if the validation errors indicate a path was not found in the spec. +func isPathNotFoundError(errors []*valerrors.ValidationError) bool { + for _, err := range errors { + errStr := err.Error() + // Check for common "path not found" error messages from libopenapi-validator. + if strings.Contains(errStr, "path") && (strings.Contains(errStr, "not found") || strings.Contains(errStr, "does not exist")) { + return true + } + if strings.Contains(errStr, "GET /notifications/live") || strings.Contains(errStr, "/notifications/live not found") { + return true + } + } + return false +} diff --git a/web/api/testhelpers/request.go b/web/api/testhelpers/request.go new file mode 100644 index 0000000000..81650e4c49 --- /dev/null +++ b/web/api/testhelpers/request.go @@ -0,0 +1,145 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file provides HTTP request builders for testing API endpoints. +package testhelpers + +import ( + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "net/url" + "strings" + "testing" +) + +// Response wraps an HTTP response with parsed JSON data. +// It supports method chaining for assertions. +// +// Example usage: +// +// testhelpers.GET(t, api, "/api/v1/query", "query", "up"). +// ValidateOpenAPI(). +// RequireSuccess(). +// RequireEquals("$.data.resultType", "vector"). +// RequireLenAtLeast("$.data.result", 1) +// +// testhelpers.POST(t, api, "/api/v1/query", "query", "up"). +// ValidateOpenAPI(). +// RequireSuccess(). +// RequireArrayContains("$.data.result", expectedValue) +type Response struct { + StatusCode int + Body string + JSON map[string]any + t *testing.T + request *http.Request + requestBody []byte + responseHeader http.Header +} + +// GET sends a GET request to the API and returns a Response with parsed JSON. +// queryParams should be pairs of key-value strings. +func GET(t *testing.T, api *APIWrapper, path string, queryParams ...string) *Response { + t.Helper() + + if len(queryParams)%2 != 0 { + t.Fatal("queryParams must be key-value pairs") + } + + // Build query string. + values := url.Values{} + for i := 0; i < len(queryParams); i += 2 { + values.Add(queryParams[i], queryParams[i+1]) + } + + fullPath := path + if len(values) > 0 { + fullPath = path + "?" + values.Encode() + } + + req := httptest.NewRequest(http.MethodGet, fullPath, nil) + return executeRequest(t, api, req) +} + +// POST sends a POST request to the API with the given body and returns a Response with parsed JSON. +// bodyParams should be pairs of key-value strings for form data. +func POST(t *testing.T, api *APIWrapper, path string, bodyParams ...string) *Response { + t.Helper() + + if len(bodyParams)%2 != 0 { + t.Fatal("bodyParams must be key-value pairs") + } + + // Build form data. + values := url.Values{} + for i := 0; i < len(bodyParams); i += 2 { + values.Add(bodyParams[i], bodyParams[i+1]) + } + + req := httptest.NewRequest(http.MethodPost, path, strings.NewReader(values.Encode())) + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + return executeRequest(t, api, req) +} + +// executeRequest executes an HTTP request and parses the response as JSON. +func executeRequest(t *testing.T, api *APIWrapper, req *http.Request) *Response { + t.Helper() + + // Capture the request body for validation. + var requestBody []byte + if req.Body != nil { + var err error + requestBody, err = io.ReadAll(req.Body) + if err != nil { + t.Fatalf("failed to read request body: %v", err) + } + // Restore the body for the actual request. 
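+ // The captured bytes are also kept on the Response so OpenAPI request
+ // validation can replay them later.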
+ req.Body = io.NopCloser(strings.NewReader(string(requestBody))) + } + + recorder := httptest.NewRecorder() + api.Handler.ServeHTTP(recorder, req) + + result := recorder.Result() + defer result.Body.Close() + + bodyBytes, err := io.ReadAll(result.Body) + if err != nil { + t.Fatalf("failed to read response body: %v", err) + } + + resp := &Response{ + StatusCode: result.StatusCode, + Body: string(bodyBytes), + t: t, + request: req, + requestBody: requestBody, + responseHeader: result.Header, + } + + // Try to parse as JSON. + if result.Header.Get("Content-Type") == "application/json" || strings.Contains(result.Header.Get("Content-Type"), "application/json") { + var jsonData map[string]any + if err := json.Unmarshal(bodyBytes, &jsonData); err != nil { + // If JSON parsing fails, leave JSON as nil. + // This allows tests to handle non-JSON responses. + resp.JSON = nil + } else { + resp.JSON = jsonData + } + } + + return resp +} diff --git a/web/api/v1/api.go b/web/api/v1/api.go index f32fee19f8..456bafc97d 100644 --- a/web/api/v1/api.go +++ b/web/api/v1/api.go @@ -258,6 +258,7 @@ type API struct { codecs []Codec featureRegistry features.Collector + openAPIBuilder *OpenAPIBuilder } // NewAPI returns an initialized API type. @@ -299,6 +300,7 @@ func NewAPI( appendMetadata bool, overrideErrorCode OverrideErrorCode, featureRegistry features.Collector, + openAPIOptions OpenAPIOptions, ) *API { a := &API{ QueryEngine: qe, @@ -329,6 +331,7 @@ func NewAPI( notificationsSub: notificationsSub, overrideErrorCode: overrideErrorCode, featureRegistry: featureRegistry, + openAPIBuilder: NewOpenAPIBuilder(openAPIOptions, logger), remoteReadHandler: remote.NewReadHandler(logger, registerer, q, configFunc, remoteReadSampleLimit, remoteReadConcurrencyLimit, remoteReadMaxBytesInFrame), } @@ -400,7 +403,7 @@ func (api *API) Register(r *route.Router) { w.WriteHeader(http.StatusNoContent) }) return api.ready(httputil.CompressionHandler{ - Handler: hf, + Handler: api.openAPIBuilder.WrapHandler(hf), }.ServeHTTP) } @@ -469,6 +472,9 @@ func (api *API) Register(r *route.Router) { r.Put("/admin/tsdb/delete_series", wrapAgent(api.deleteSeries)) r.Put("/admin/tsdb/clean_tombstones", wrapAgent(api.cleanTombstones)) r.Put("/admin/tsdb/snapshot", wrapAgent(api.snapshot)) + + // OpenAPI endpoint. + r.Get("/openapi.yaml", api.ready(api.openAPIBuilder.ServeOpenAPI)) } type QueryData struct { diff --git a/web/api/v1/api_scenarios_test.go b/web/api/v1/api_scenarios_test.go new file mode 100644 index 0000000000..a707680c57 --- /dev/null +++ b/web/api/v1/api_scenarios_test.go @@ -0,0 +1,419 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1 + +import ( + "strconv" + "testing" + "time" + + "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/web/api/testhelpers" +) + +// TODO: Generate automated tests from OpenAPI spec to validate API responses. + +// TestAPIEmpty tests the API with no metrics and no rules. 
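+// Every endpoint should still respond successfully, returning empty data where applicable.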
+func TestAPIEmpty(t *testing.T) { + // Create an API with empty defaults (no series, no rules). + api := newTestAPI(t, testhelpers.APIConfig{}) + + t.Run("GET /api/v1/labels returns success with empty array", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/labels"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONArray("$.data") + }) + + t.Run("GET /api/v1/query?query=up returns success (empty result ok)", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query", "query", "up"). + ValidateOpenAPI(). + RequireSuccess(). + RequireEquals("$.data.resultType", "vector") + }) + + t.Run("GET /api/v1/query_range?query=up returns success", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query_range", + "query", "up", + "start", "0", + "end", "100", + "step", "10"). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "matrix") + }) + + t.Run("GET /api/v1/series returns success with empty result", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/series", + "match[]", "up", + "start", "0", + "end", "100"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONArray("$.data") + }) + + t.Run("GET /api/v1/label/__name__/values returns success with empty array", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/label/__name__/values"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONArray("$.data") + }) + + t.Run("GET /api/v1/targets returns success", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/targets"). + RequireSuccess(). + RequireJSONPathExists("$.data.activeTargets") + }) + + t.Run("GET /api/v1/rules returns success with empty groups", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/rules"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data.groups") + }) + + t.Run("GET /api/v1/alerts returns success with empty alerts", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/alerts"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data.alerts") + }) + + t.Run("GET /api/v1/alertmanagers returns success", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/alertmanagers"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data.activeAlertmanagers") + }) + + t.Run("GET /api/v1/metadata returns success", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/metadata"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data") + }) + + t.Run("GET /api/v1/status/config returns success", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/status/config"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data.yaml") + }) + + t.Run("GET /api/v1/status/flags returns success", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/status/flags"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data") + }) + + t.Run("GET /api/v1/status/runtimeinfo returns success", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/status/runtimeinfo"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data") + }) + + t.Run("GET /api/v1/status/buildinfo returns success", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/status/buildinfo"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data") + }) + + t.Run("POST /api/v1/query with form data returns success", func(t *testing.T) { + testhelpers.POST(t, api, "/api/v1/query", "query", "up"). + RequireSuccess(). + ValidateOpenAPI(). 
+ RequireEquals("$.data.resultType", "vector") + }) +} + +// TestAPIWithSeries tests the API with metrics/series data. +func TestAPIWithSeries(t *testing.T) { + // Create an API with sample series data. + api := newTestAPI(t, testhelpers.APIConfig{ + Queryable: testhelpers.NewLazyLoader(func() storage.SampleAndChunkQueryable { + return testhelpers.NewQueryableWithSeries(testhelpers.FixtureMultipleSeries()) + }), + }) + + t.Run("GET /api/v1/query returns vector with >= 1 sample", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query", "query", "up"). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "vector"). + RequireLenAtLeast("$.data.result", 1) + }) + + t.Run("GET /api/v1/query_range returns matrix result type", func(t *testing.T) { + // Use relative timestamps to match our fixtures. + now := time.Now().Unix() + testhelpers.GET(t, api, "/api/v1/query_range", + "query", "up", + "start", strconv.FormatInt(now-120, 10), + "end", strconv.FormatInt(now, 10), + "step", "60"). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "matrix") + // Note: Result may be empty if timestamps don't align perfectly with samples. + }) + + t.Run("GET /api/v1/labels returns non-empty array", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/labels"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONArray("$.data"). + RequireLenAtLeast("$.data", 1) + }) + + t.Run("GET /api/v1/label/__name__/values contains expected metric names", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/label/__name__/values"). + RequireSuccess(). + ValidateOpenAPI(). + RequireArrayContains("$.data", "up"). + RequireArrayContains("$.data", "http_requests_total") + }) + + t.Run("GET /api/v1/label/job/values contains expected jobs", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/label/job/values"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONArray("$.data"). + RequireArrayContains("$.data", "prometheus"). + RequireArrayContains("$.data", "node"). + RequireArrayContains("$.data", "api") + }) + + t.Run("GET /api/v1/series with match returns results", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/series", + "match[]", "up", + "start", "0", + "end", "120"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONArray("$.data"). + RequireLenAtLeast("$.data", 1) + }) + + t.Run("GET /api/v1/query with specific job returns filtered results", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query", "query", `up{job="prometheus"}`). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "vector"). + RequireLenAtLeast("$.data.result", 1) + }) + + t.Run("GET /api/v1/query with aggregation returns result", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query", "query", "sum(up)"). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "vector") + }) + + t.Run("POST /api/v1/query returns vector with data", func(t *testing.T) { + testhelpers.POST(t, api, "/api/v1/query", "query", "up"). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "vector"). + RequireLenAtLeast("$.data.result", 1) + }) +} + +// TestAPIWithRules tests the API with rules configured. +func TestAPIWithRules(t *testing.T) { + // Create an API with rule groups. 
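+ // The fixture groups include a group named "example" with at least one rule,
+ // which the assertions below rely on.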
+ api := newTestAPI(t, testhelpers.APIConfig{ + RulesRetriever: testhelpers.NewLazyLoader(func() testhelpers.RulesRetriever { + return testhelpers.NewRulesRetrieverWithGroups(testhelpers.FixtureRuleGroups()) + }), + }) + + t.Run("GET /api/v1/rules returns groups with rules", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/rules"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data.groups"). + RequireLenAtLeast("$.data.groups", 1). + RequireSome("$.data.groups", func(group any) bool { + if g, ok := group.(map[string]any); ok { + return g["name"] == "example" + } + return false + }). + RequireSome("$.data.groups", func(group any) bool { + if g, ok := group.(map[string]any); ok { + if g["name"] == "example" { + // Check that the group has rules. + if rules, ok := g["rules"].([]any); ok { + return len(rules) > 0 + } + } + } + return false + }) + }) + + t.Run("GET /api/v1/alerts returns alerts array", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/alerts"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data.alerts"). + RequireJSONArray("$.data.alerts") + }) + + t.Run("GET /api/v1/rules with rule_name filter", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/rules", "rule_name[]", "InstanceDown"). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONPathExists("$.data.groups") + }) +} + +// TestAPITSDBNotReady tests the API when TSDB is not ready (e.g., during WAL replay). +// TSDB not ready errors are converted to errorUnavailable by setUnavailStatusOnTSDBNotReady, +// which returns HTTP 500 Internal Server Error (the default for errorUnavailable). +func TestAPITSDBNotReady(t *testing.T) { + // Create an API with a queryable that returns tsdb.ErrNotReady. + api := newTestAPI(t, testhelpers.APIConfig{ + Queryable: testhelpers.NewLazyLoader(testhelpers.NewTSDBNotReadyQueryable), + }) + + t.Run("GET /api/v1/query returns 500 when TSDB not ready", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query", "query", "up"). + RequireStatusCode(500). + ValidateOpenAPI(). + RequireError() + }) + + t.Run("POST /api/v1/query returns 500 when TSDB not ready", func(t *testing.T) { + testhelpers.POST(t, api, "/api/v1/query", "query", "up"). + RequireStatusCode(500). + ValidateOpenAPI(). + RequireError() + }) + + t.Run("GET /api/v1/query_range returns 500 when TSDB not ready", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query_range", + "query", "up", + "start", "0", + "end", "100", + "step", "10"). + RequireStatusCode(500). + ValidateOpenAPI(). + RequireError() + }) + + t.Run("GET /api/v1/series returns 500 when TSDB not ready", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/series", + "match[]", "up", + "start", "0", + "end", "100"). + RequireStatusCode(500). + ValidateOpenAPI(). + RequireError() + }) + + t.Run("GET /api/v1/labels returns 500 when TSDB not ready", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/labels"). + RequireStatusCode(500). + ValidateOpenAPI(). + RequireError() + }) + + t.Run("GET /api/v1/label/{name}/values returns 500 when TSDB not ready", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/label/__name__/values"). + RequireStatusCode(500). + ValidateOpenAPI(). + RequireError() + }) +} + +// TestAPIWithNativeHistograms tests the API with native histogram data. +func TestAPIWithNativeHistograms(t *testing.T) { + // Create an API with histogram series data. 
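+ // The fixtures expose a native histogram metric named test_histogram, so query
+ // results carry a "histogram" field instead of a "value" field.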
+ api := newTestAPI(t, testhelpers.APIConfig{ + Queryable: testhelpers.NewLazyLoader(func() storage.SampleAndChunkQueryable { + return testhelpers.NewQueryableWithSeries(testhelpers.FixtureHistogramSeries()) + }), + }) + + t.Run("GET /api/v1/query returns vector with native histogram", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query", "query", "test_histogram"). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "vector"). + RequireLenAtLeast("$.data.result", 1). + RequireSome("$.data.result", func(item any) bool { + sample, ok := item.(map[string]any) + if !ok { + return false + } + // Check that the sample has a histogram field (not a value field). + _, hasHistogram := sample["histogram"] + return hasHistogram + }) + }) + + t.Run("POST /api/v1/query returns vector with native histogram", func(t *testing.T) { + testhelpers.POST(t, api, "/api/v1/query", "query", "test_histogram"). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "vector"). + RequireLenAtLeast("$.data.result", 1). + RequireSome("$.data.result", func(item any) bool { + sample, ok := item.(map[string]any) + if !ok { + return false + } + // Check that the sample has a histogram field (not a value field). + _, hasHistogram := sample["histogram"] + return hasHistogram + }) + }) + + t.Run("GET /api/v1/query_range returns matrix with native histogram", func(t *testing.T) { + // Use relative timestamps to match our fixtures. + now := time.Now().Unix() + testhelpers.GET(t, api, "/api/v1/query_range", + "query", "test_histogram", + "start", strconv.FormatInt(now-120, 10), + "end", strconv.FormatInt(now, 10), + "step", "60"). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "matrix") + }) + + t.Run("GET /api/v1/query with histogram selector", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/query", "query", `test_histogram{job="prometheus"}`). + RequireSuccess(). + ValidateOpenAPI(). + RequireEquals("$.data.resultType", "vector"). + RequireLenAtLeast("$.data.result", 1) + }) + + t.Run("GET /api/v1/series returns histogram metric series", func(t *testing.T) { + testhelpers.GET(t, api, "/api/v1/series", + "match[]", "test_histogram", + "start", "0", + "end", strconv.FormatInt(time.Now().Unix(), 10)). + RequireSuccess(). + ValidateOpenAPI(). + RequireJSONArray("$.data"). + RequireLenAtLeast("$.data", 1) + }) +} diff --git a/web/api/v1/errors_test.go b/web/api/v1/errors_test.go index 6e55089e16..850bedef17 100644 --- a/web/api/v1/errors_test.go +++ b/web/api/v1/errors_test.go @@ -169,6 +169,7 @@ func createPrometheusAPI(t *testing.T, q storage.SampleAndChunkQueryable, overri false, overrideErrorCode, nil, + OpenAPIOptions{}, ) promRouter := route.New().WithPrefix("/api/v1") diff --git a/web/api/v1/openapi.go b/web/api/v1/openapi.go new file mode 100644 index 0000000000..59fa8969ef --- /dev/null +++ b/web/api/v1/openapi.go @@ -0,0 +1,320 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// This file implements OpenAPI 3.2 specification generation for the Prometheus HTTP API. +// It provides dynamic spec building with optional path filtering. +package v1 + +import ( + "log/slog" + "net/http" + "net/url" + "path" + "strings" + "sync" + + "github.com/pb33f/libopenapi/datamodel/high/base" + v3 "github.com/pb33f/libopenapi/datamodel/high/v3" + "github.com/pb33f/libopenapi/orderedmap" +) + +const ( + // OpenAPI 3.1.0 is the default version with broader compatibility. + openAPIVersion31 = "3.1.0" + // OpenAPI 3.2.0 supports advanced features like itemSchema for SSE streams. + openAPIVersion32 = "3.2.0" +) + +// OpenAPIOptions configures the OpenAPI spec builder. +type OpenAPIOptions struct { + // IncludePaths filters which paths to include in the spec. + // If empty, all paths are included. + // Paths are matched by prefix (e.g., "/query" matches "/query" and "/query_range"). + IncludePaths []string + + // ExternalURL is the external URL of the Prometheus server (e.g., "http://prometheus.example.com:9090"). + ExternalURL string + + // Version is the API version to include in the OpenAPI spec. + // If empty, defaults to "0.0.1-undefined". + Version string +} + +// OpenAPIBuilder builds and caches OpenAPI specifications. +type OpenAPIBuilder struct { + mu sync.RWMutex + cachedYAML31 []byte // Cached OpenAPI 3.1 spec. + cachedYAML32 []byte // Cached OpenAPI 3.2 spec. + options OpenAPIOptions + logger *slog.Logger +} + +// NewOpenAPIBuilder creates a new OpenAPI builder with the given options. +func NewOpenAPIBuilder(opts OpenAPIOptions, logger *slog.Logger) *OpenAPIBuilder { + b := &OpenAPIBuilder{ + options: opts, + logger: logger, + } + + b.rebuild() + return b +} + +// rebuild constructs the OpenAPI specs for both 3.1 and 3.2 versions based on current options. +func (b *OpenAPIBuilder) rebuild() { + b.mu.Lock() + defer b.mu.Unlock() + + // Build OpenAPI 3.1 spec. + doc31 := b.buildDocument(openAPIVersion31) + yamlBytes31, err := doc31.Render() + if err != nil { + b.logger.Error("failed to render OpenAPI 3.1 spec - this is a bug, please report it", "err", err) + return + } + b.cachedYAML31 = yamlBytes31 + + // Build OpenAPI 3.2 spec. + doc32 := b.buildDocument(openAPIVersion32) + yamlBytes32, err := doc32.Render() + if err != nil { + b.logger.Error("failed to render OpenAPI 3.2 spec - this is a bug, please report it", "err", err) + return + } + b.cachedYAML32 = yamlBytes32 +} + +// ServeOpenAPI returns the OpenAPI specification as YAML. +// By default, serves OpenAPI 3.1.0. Use ?openapi_version=3.2 for OpenAPI 3.2.0. +func (b *OpenAPIBuilder) ServeOpenAPI(w http.ResponseWriter, r *http.Request) { + // Parse query parameter to determine which version to serve. + requestedVersion := r.URL.Query().Get("openapi_version") + + b.mu.RLock() + var yamlData []byte + switch requestedVersion { + case "3.2", "3.2.0": + yamlData = b.cachedYAML32 + case "3.1", "3.1.0": + yamlData = b.cachedYAML31 + default: + // Default to OpenAPI 3.1.0 for broader compatibility. + yamlData = b.cachedYAML31 + } + b.mu.RUnlock() + + w.Header().Set("Content-Type", "application/yaml; charset=utf-8") + w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate") + w.WriteHeader(http.StatusOK) + w.Write(yamlData) +} + +// WrapHandler returns the handler unchanged (no validation). +func (*OpenAPIBuilder) WrapHandler(next http.HandlerFunc) http.HandlerFunc { + return next +} + +// shouldIncludePath checks if a path should be included based on options. 
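+// Matching is by prefix, so an IncludePaths entry of "/query" also matches
+// "/query_range" and "/query_exemplars".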
+func (b *OpenAPIBuilder) shouldIncludePath(path string) bool {
+ if len(b.options.IncludePaths) == 0 {
+ return true
+ }
+ for _, include := range b.options.IncludePaths {
+ if strings.HasPrefix(path, include) || path == include {
+ return true
+ }
+ }
+ return false
+}
+
+// shouldIncludePathForVersion checks if a path should be included for a specific OpenAPI version.
+func (b *OpenAPIBuilder) shouldIncludePathForVersion(path, version string) bool {
+ // First check IncludePaths filter.
+ if !b.shouldIncludePath(path) {
+ return false
+ }
+
+ // OpenAPI 3.1 excludes paths that require 3.2 features.
+ // The /notifications/live endpoint uses itemSchema which is a 3.2-only feature.
+ if version == openAPIVersion31 && path == "/notifications/live" {
+ return false
+ }
+
+ return true
+}
+
+// buildDocument creates the OpenAPI document for the specified version using high-level structs.
+func (b *OpenAPIBuilder) buildDocument(version string) *v3.Document {
+ return &v3.Document{
+ Version: version,
+ Info: b.buildInfo(),
+ Servers: b.buildServers(),
+ Tags: b.buildTags(version),
+ Paths: b.buildPaths(version),
+ Components: b.buildComponents(),
+ }
+}
+
+// buildInfo constructs the info section.
+func (b *OpenAPIBuilder) buildInfo() *base.Info {
+ apiVersion := b.options.Version
+ if apiVersion == "" {
+ apiVersion = "0.0.1-undefined"
+ }
+ return &base.Info{
+ Title: "Prometheus API",
+ Description: "Prometheus is an Open-Source monitoring system with a dimensional data model, flexible query language, efficient time series database and modern alerting approach.",
+ Version: apiVersion,
+ Contact: &base.Contact{
+ Name: "Prometheus Community",
+ URL: "https://prometheus.io/community/",
+ },
+ }
+}
+
+// buildServers constructs the servers section.
+func (b *OpenAPIBuilder) buildServers() []*v3.Server {
+ // ExternalURL is always set by computeExternalURL in main.go.
+ // It includes scheme, host, port, and optional path prefix (without trailing slash).
+ serverURL := "/api/v1"
+ if b.options.ExternalURL != "" {
+ baseURL, err := url.Parse(b.options.ExternalURL)
+ if err == nil {
+ // Use path.Join to properly append /api/v1 to any existing path prefix,
+ // then serialize the resulting URL back to a string.
+ baseURL.Path = path.Join(baseURL.Path, "/api/v1")
+ serverURL = baseURL.String()
+ }
+ }
+ return []*v3.Server{
+ {URL: serverURL},
+ }
+}
+
+// buildTags constructs the global tags list.
+// Tag summary is an OpenAPI 3.2 feature, excluded from 3.1.
+// Tag description is supported in both 3.1 and 3.2.
+func (*OpenAPIBuilder) buildTags(version string) []*base.Tag {
+ // Define tags with all metadata.
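+ // Each entry provides a description (valid in both 3.1 and 3.2) and a summary
+ // that is emitted only for OpenAPI 3.2 documents.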
+ tagData := []struct { + name string + summary string + description string + }{ + {"query", "Query", "Query and evaluate PromQL expressions."}, + {"metadata", "Metadata", "Retrieve metric metadata such as type and unit."}, + {"labels", "Labels", "Query label names and values."}, + {"series", "Series", "Query and manage time series."}, + {"targets", "Targets", "Retrieve target and scrape pool information."}, + {"rules", "Rules", "Query recording and alerting rules."}, + {"alerts", "Alerts", "Query active alerts and alertmanager discovery."}, + {"status", "Status", "Retrieve server status and configuration."}, + {"admin", "Admin", "Administrative operations for TSDB management."}, + {"features", "Features", "Query enabled features."}, + {"remote", "Remote Storage", "Remote read and write endpoints."}, + {"otlp", "OTLP", "OpenTelemetry Protocol metrics ingestion."}, + {"notifications", "Notifications", "Server notifications and events."}, + } + + tags := make([]*base.Tag, 0, len(tagData)) + for _, td := range tagData { + tag := &base.Tag{ + Name: td.name, + Description: td.description, // Description is supported in both 3.1 and 3.2. + } + + // Summary is an OpenAPI 3.2 feature only. + if version == openAPIVersion32 { + tag.Summary = td.summary + } + + tags = append(tags, tag) + } + + return tags +} + +// buildPaths constructs all API path definitions. +func (b *OpenAPIBuilder) buildPaths(version string) *v3.Paths { + pathItems := orderedmap.New[string, *v3.PathItem]() + + allPaths := b.getAllPathDefinitions() + for pair := allPaths.First(); pair != nil; pair = pair.Next() { + if b.shouldIncludePathForVersion(pair.Key(), version) { + pathItems.Set(pair.Key(), pair.Value()) + } + } + + return &v3.Paths{PathItems: pathItems} +} + +// getAllPathDefinitions returns all path definitions. +func (b *OpenAPIBuilder) getAllPathDefinitions() *orderedmap.Map[string, *v3.PathItem] { + paths := orderedmap.New[string, *v3.PathItem]() + + // Query endpoints. + paths.Set("/query", b.queryPath()) + paths.Set("/query_range", b.queryRangePath()) + paths.Set("/query_exemplars", b.queryExemplarsPath()) + paths.Set("/format_query", b.formatQueryPath()) + paths.Set("/parse_query", b.parseQueryPath()) + + // Label endpoints. + paths.Set("/labels", b.labelsPath()) + paths.Set("/label/{name}/values", b.labelValuesPath()) + + // Series endpoints. + paths.Set("/series", b.seriesPath()) + + // Metadata endpoints. + paths.Set("/metadata", b.metadataPath()) + + // Target endpoints. + paths.Set("/scrape_pools", b.scrapePoolsPath()) + paths.Set("/targets", b.targetsPath()) + paths.Set("/targets/metadata", b.targetsMetadataPath()) + paths.Set("/targets/relabel_steps", b.targetsRelabelStepsPath()) + + // Rules and alerts endpoints. + paths.Set("/rules", b.rulesPath()) + paths.Set("/alerts", b.alertsPath()) + paths.Set("/alertmanagers", b.alertmanagersPath()) + + // Status endpoints. + paths.Set("/status/config", b.statusConfigPath()) + paths.Set("/status/runtimeinfo", b.statusRuntimeInfoPath()) + paths.Set("/status/buildinfo", b.statusBuildInfoPath()) + paths.Set("/status/flags", b.statusFlagsPath()) + paths.Set("/status/tsdb", b.statusTSDBPath()) + paths.Set("/status/tsdb/blocks", b.statusTSDBBlocksPath()) + paths.Set("/status/walreplay", b.statusWALReplayPath()) + + // Admin endpoints. + paths.Set("/admin/tsdb/delete_series", b.adminDeleteSeriesPath()) + paths.Set("/admin/tsdb/clean_tombstones", b.adminCleanTombstonesPath()) + paths.Set("/admin/tsdb/snapshot", b.adminSnapshotPath()) + + // Remote endpoints. 
+ paths.Set("/read", b.remoteReadPath()) + paths.Set("/write", b.remoteWritePath()) + paths.Set("/otlp/v1/metrics", b.otlpWritePath()) + + // Notifications endpoints. + paths.Set("/notifications", b.notificationsPath()) + paths.Set("/notifications/live", b.notificationsLivePath()) + + // Features endpoint. + paths.Set("/features", b.featuresPath()) + + return paths +} diff --git a/web/api/v1/openapi_coverage_test.go b/web/api/v1/openapi_coverage_test.go new file mode 100644 index 0000000000..103f82e08e --- /dev/null +++ b/web/api/v1/openapi_coverage_test.go @@ -0,0 +1,258 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1 + +import ( + _ "embed" + "go/ast" + "go/parser" + "go/token" + "strconv" + "strings" + "testing" + + v3 "github.com/pb33f/libopenapi/datamodel/high/v3" + "github.com/prometheus/common/promslog" + "github.com/stretchr/testify/require" +) + +//go:embed api.go +var apiGoSource string + +// routeInfo represents a route extracted from the Register function. +type routeInfo struct { + method string + path string +} + +// extractRoutesFromRegister parses the api.go source and extracts all routes +// registered in the (*API) Register function using AST. +func extractRoutesFromRegister(t *testing.T, source string) []routeInfo { + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "api.go", source, parser.ParseComments) + require.NoError(t, err, "failed to parse api.go") + + var registerFunc *ast.FuncDecl + + // Find the Register method on *API. + ast.Inspect(f, func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Body == nil { + return true + } + + if fn.Name.Name != "Register" { + return true + } + + // Ensure it's a method on *API. + if fn.Recv == nil || len(fn.Recv.List) != 1 { + return true + } + + star, ok := fn.Recv.List[0].Type.(*ast.StarExpr) + if !ok { + return true + } + + ident, ok := star.X.(*ast.Ident) + if !ok || ident.Name != "API" { + return true + } + + registerFunc = fn + return false // Stop walking once found. + }) + + require.NotNil(t, registerFunc, "Register method not found") + + var routes []routeInfo + + // Extract all r.Get, r.Post, r.Put, r.Delete, r.Options calls. + ast.Inspect(registerFunc.Body, func(n ast.Node) bool { + call, ok := n.(*ast.CallExpr) + if !ok { + return true + } + + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + + // Check if it's a router method call. + method := sel.Sel.Name + if method != "Get" && method != "Post" && method != "Put" && method != "Delete" && method != "Del" && method != "Options" { + return true + } + + // Ensure the receiver is 'r'. + if x, ok := sel.X.(*ast.Ident); !ok || x.Name != "r" { + return true + } + + if len(call.Args) == 0 { + return true + } + + // Extract the path from the first argument. + lit, ok := call.Args[0].(*ast.BasicLit) + if !ok || lit.Kind != token.STRING { + return true + } + + path, err := strconv.Unquote(lit.Value) + if err != nil { + return true + } + + // Normalize Del to DELETE. 
+ if method == "Del" { + method = "Delete" + } + + routes = append(routes, routeInfo{ + method: strings.ToUpper(method), + path: path, + }) + return true + }) + + return routes +} + +// normalizePathForOpenAPI converts route paths with colon parameters to OpenAPI format. +// e.g., "/label/:name/values" -> "/label/{name}/values". +func normalizePathForOpenAPI(path string) string { + // Replace :param with {param}. + parts := strings.Split(path, "/") + for i, part := range parts { + if trimmed, ok := strings.CutPrefix(part, ":"); ok { + parts[i] = "{" + trimmed + "}" + } + } + return strings.Join(parts, "/") +} + +// TestOpenAPICoverage verifies that all routes registered in the Register function +// are documented in the OpenAPI specification. +func TestOpenAPICoverage(t *testing.T) { + // Extract routes from api.go using AST. + routes := extractRoutesFromRegister(t, apiGoSource) + require.NotEmpty(t, routes, "no routes found in Register function") + + // Build OpenAPI spec. + builder := NewOpenAPIBuilder(OpenAPIOptions{}, promslog.NewNopLogger()) + allPaths := builder.getAllPathDefinitions() + + // Create a map of OpenAPI paths for quick lookup. + // Key is the normalized path, value is the PathItem. + openAPIPaths := make(map[string]bool) + for pair := allPaths.First(); pair != nil; pair = pair.Next() { + pathItem := pair.Value() + path := pair.Key() + + // Track which methods are defined for this path. + if pathItem.Get != nil { + openAPIPaths[path+":GET"] = true + } + if pathItem.Post != nil { + openAPIPaths[path+":POST"] = true + } + if pathItem.Put != nil { + openAPIPaths[path+":PUT"] = true + } + if pathItem.Delete != nil { + openAPIPaths[path+":DELETE"] = true + } + if pathItem.Options != nil { + openAPIPaths[path+":OPTIONS"] = true + } + } + + // Check coverage for each route. + var missingRoutes []string + ignoredRoutes := map[string]bool{ + "/*path:OPTIONS": true, // Wildcard OPTIONS handler. + "/openapi.yaml:GET": true, // Self-referential endpoint. + "/notifications/live:GET": true, // SSE endpoint (version-specific). + } + + for _, route := range routes { + normalizedPath := normalizePathForOpenAPI(route.path) + key := normalizedPath + ":" + route.method + + // Skip ignored routes. + if ignoredRoutes[key] { + continue + } + + if !openAPIPaths[key] { + missingRoutes = append(missingRoutes, key) + } + } + + if len(missingRoutes) > 0 { + t.Errorf("The following routes are registered but not documented in OpenAPI spec:\n%s", + strings.Join(missingRoutes, "\n")) + } +} + +// TestOpenAPIHasNoExtraRoutes verifies that the OpenAPI spec doesn't document +// routes that aren't actually registered. +func TestOpenAPIHasNoExtraRoutes(t *testing.T) { + // Extract routes from api.go using AST. + routes := extractRoutesFromRegister(t, apiGoSource) + require.NotEmpty(t, routes, "no routes found in Register function") + + // Create a map of registered routes. + registeredRoutes := make(map[string]bool) + for _, route := range routes { + normalizedPath := normalizePathForOpenAPI(route.path) + key := normalizedPath + ":" + route.method + registeredRoutes[key] = true + } + + // Build OpenAPI spec. + builder := NewOpenAPIBuilder(OpenAPIOptions{}, promslog.NewNopLogger()) + allPaths := builder.getAllPathDefinitions() + + // Check if any OpenAPI paths are not registered. 
+ var extraRoutes []string + + for pair := allPaths.First(); pair != nil; pair = pair.Next() { + pathItem := pair.Value() + path := pair.Key() + + checkMethod := func(method string, op *v3.Operation) { + if op != nil { + key := path + ":" + method + if !registeredRoutes[key] { + extraRoutes = append(extraRoutes, key) + } + } + } + + checkMethod("GET", pathItem.Get) + checkMethod("POST", pathItem.Post) + checkMethod("PUT", pathItem.Put) + checkMethod("DELETE", pathItem.Delete) + checkMethod("OPTIONS", pathItem.Options) + } + + if len(extraRoutes) > 0 { + t.Errorf("The following routes are documented in OpenAPI but not registered:\n%s", + strings.Join(extraRoutes, "\n")) + } +} diff --git a/web/api/v1/openapi_examples.go b/web/api/v1/openapi_examples.go new file mode 100644 index 0000000000..50e155b184 --- /dev/null +++ b/web/api/v1/openapi_examples.go @@ -0,0 +1,1013 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file contains example request bodies and response data for OpenAPI documentation. +// Examples are included in the generated spec to provide realistic usage scenarios for API consumers. +package v1 + +import ( + "github.com/pb33f/libopenapi/datamodel/high/base" + "github.com/pb33f/libopenapi/orderedmap" + + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/promql" +) + +// Example builders for request bodies. + +func queryPostExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("simpleQuery", &base.Example{ + Summary: "Simple instant query", + Value: createYAMLNode(map[string]any{"query": "up"}), + }) + + examples.Set("queryWithTime", &base.Example{ + Summary: "Query with specific timestamp", + Value: createYAMLNode(map[string]any{ + "query": "up{job=\"prometheus\"}", + "time": "2026-01-02T13:37:00.000Z", + }), + }) + + examples.Set("queryWithLimit", &base.Example{ + Summary: "Query with limit and statistics", + Value: createYAMLNode(map[string]any{ + "query": "rate(prometheus_http_requests_total{handler=\"/api/v1/query\"}[5m])", + "limit": 100, + "stats": "all", + }), + }) + + return examples +} + +// queryRangePostExamples returns examples for POST /query_range endpoint. 
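+// The examples cover a basic range query and a rate() calculation with an explicit timeout.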
+func queryRangePostExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("basicRange", &base.Example{ + Summary: "Basic range query", + Value: createYAMLNode(map[string]any{ + "query": "up", + "start": "2026-01-02T12:37:00.000Z", + "end": "2026-01-02T13:37:00.000Z", + "step": "15s", + }), + }) + + examples.Set("rateQuery", &base.Example{ + Summary: "Rate calculation over time range", + Value: createYAMLNode(map[string]any{ + "query": "rate(prometheus_http_requests_total{handler=\"/api/v1/query\"}[5m])", + "start": "2026-01-02T12:37:00.000Z", + "end": "2026-01-02T13:37:00.000Z", + "step": "30s", + "timeout": "30s", + }), + }) + + return examples +} + +// queryExemplarsPostExamples returns examples for POST /query_exemplars endpoint. +func queryExemplarsPostExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("basicExemplar", &base.Example{ + Summary: "Query exemplars for a metric", + Value: createYAMLNode(map[string]any{"query": "prometheus_http_requests_total"}), + }) + + examples.Set("exemplarWithTimeRange", &base.Example{ + Summary: "Exemplars within specific time range", + Value: createYAMLNode(map[string]any{ + "query": "prometheus_http_requests_total{job=\"prometheus\"}", + "start": "2026-01-02T12:37:00.000Z", + "end": "2026-01-02T13:37:00.000Z", + }), + }) + + return examples +} + +// formatQueryPostExamples returns examples for POST /format_query endpoint. +func formatQueryPostExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("simpleFormat", &base.Example{ + Summary: "Format a simple query", + Value: createYAMLNode(map[string]any{"query": "up{job=\"prometheus\"}"}), + }) + + examples.Set("complexFormat", &base.Example{ + Summary: "Format a complex query", + Value: createYAMLNode(map[string]any{"query": "sum(rate(http_requests_total[5m])) by (job, status)"}), + }) + + return examples +} + +// parseQueryPostExamples returns examples for POST /parse_query endpoint. +func parseQueryPostExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("simpleParse", &base.Example{ + Summary: "Parse a simple query", + Value: createYAMLNode(map[string]any{"query": "up"}), + }) + + examples.Set("complexParse", &base.Example{ + Summary: "Parse a complex query", + Value: createYAMLNode(map[string]any{"query": "rate(http_requests_total{job=\"api\"}[5m])"}), + }) + + return examples +} + +// labelsPostExamples returns examples for POST /labels endpoint. +func labelsPostExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("allLabels", &base.Example{ + Summary: "Get all label names", + Value: createYAMLNode(map[string]any{}), + }) + + examples.Set("labelsWithTimeRange", &base.Example{ + Summary: "Get label names within time range", + Value: createYAMLNode(map[string]any{ + "start": "2026-01-02T12:37:00.000Z", + "end": "2026-01-02T13:37:00.000Z", + }), + }) + + examples.Set("labelsWithMatch", &base.Example{ + Summary: "Get label names matching series selector", + Value: createYAMLNode(map[string]any{ + "match[]": []string{"up", "process_start_time_seconds{job=\"prometheus\"}"}, + }), + }) + + return examples +} + +// seriesPostExamples returns examples for POST /series endpoint. 
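+// Both examples use match[] selectors; the second also bounds the lookup with start and end timestamps.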
+func seriesPostExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("seriesMatch", &base.Example{ + Summary: "Find series by label matchers", + Value: createYAMLNode(map[string]any{ + "match[]": []string{"up"}, + }), + }) + + examples.Set("seriesWithTimeRange", &base.Example{ + Summary: "Find series with time range", + Value: createYAMLNode(map[string]any{ + "match[]": []string{"up", "process_cpu_seconds_total{job=\"prometheus\"}"}, + "start": "2026-01-02T12:37:00.000Z", + "end": "2026-01-02T13:37:00.000Z", + }), + }) + + return examples +} + +// Example builders for response bodies. + +// queryResponseExamples returns examples for /query response. +func queryResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + vectorResult := promql.Vector{ + promql.Sample{ + Metric: labels.FromStrings("__name__", "up", "job", "prometheus", "instance", "demo.prometheus.io:9090"), + T: 1767436620000, + F: 1, + }, + promql.Sample{ + Metric: labels.FromStrings("__name__", "up", "env", "demo", "job", "alertmanager", "instance", "demo.prometheus.io:9093"), + T: 1767436620000, + F: 1, + }, + } + + examples.Set("vectorResult", &base.Example{ + Summary: "Instant vector query: up", + Value: vectorExample(vectorResult), + }) + + examples.Set("scalarResult", &base.Example{ + Summary: "Scalar query: scalar(42)", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "resultType": "scalar", + "result": []any{1767436620, "42"}, + }, + }), + }) + + matrixResult := promql.Matrix{ + promql.Series{ + Metric: labels.FromStrings("__name__", "up", "job", "prometheus", "instance", "demo.prometheus.io:9090"), + Floats: []promql.FPoint{ + {T: 1767436320000, F: 1}, + {T: 1767436620000, F: 1}, + }, + }, + } + + examples.Set("matrixResult", &base.Example{ + Summary: "Range vector query: up[5m]", + Value: matrixExample(matrixResult), + }) + + // TODO: Add native histogram example. + + return examples +} + +// queryRangeResponseExamples returns examples for /query_range response. +func queryRangeResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + matrixResult := promql.Matrix{ + promql.Series{ + Metric: labels.FromStrings("__name__", "up", "job", "prometheus", "instance", "demo.prometheus.io:9090"), + Floats: []promql.FPoint{ + {T: 1767433020000, F: 1}, + {T: 1767434820000, F: 1}, + {T: 1767436620000, F: 1}, + }, + }, + } + + examples.Set("matrixResult", &base.Example{ + Summary: "Range query: rate(prometheus_http_requests_total[5m])", + Value: matrixExample(matrixResult), + }) + + // TODO: Add native histogram example. + + return examples +} + +// labelsResponseExamples returns examples for /labels response. +func labelsResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("labelNames", &base.Example{ + Summary: "List of label names", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": []string{ + "__name__", "active", "address", "alertmanager", "alertname", "alertstate", + "backend", "branch", "code", "collector", "component", "device", + "env", "endpoint", "fstype", "handler", "instance", "job", + "le", "method", "mode", "name", + }, + }), + }) + + return examples +} + +// seriesResponseExamples returns examples for /series response. 
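+// The example lists label sets for the "up" metric across several demo jobs.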
+func seriesResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("seriesList", &base.Example{ + Summary: "List of series matching the selector", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": []map[string]string{ + { + "__name__": "up", + "env": "demo", + "instance": "demo.prometheus.io:8080", + "job": "cadvisor", + }, + { + "__name__": "up", + "env": "demo", + "instance": "demo.prometheus.io:9093", + "job": "alertmanager", + }, + { + "__name__": "up", + "env": "demo", + "instance": "demo.prometheus.io:9100", + "job": "node", + }, + { + "__name__": "up", + "instance": "demo.prometheus.io:3000", + "job": "grafana", + }, + { + "__name__": "up", + "instance": "demo.prometheus.io:8996", + "job": "random", + }, + }, + }), + }) + + return examples +} + +// targetsResponseExamples returns examples for /targets response. +func targetsResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("targetsList", &base.Example{ + Summary: "Active and dropped targets", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "activeTargets": []map[string]any{ + { + "discoveredLabels": map[string]string{ + "__address__": "demo.prometheus.io:9093", + "__meta_filepath": "/etc/prometheus/file_sd/alertmanager.yml", + "__metrics_path__": "/metrics", + "__scheme__": "http", + "env": "demo", + "job": "alertmanager", + }, + "labels": map[string]string{ + "env": "demo", + "instance": "demo.prometheus.io:9093", + "job": "alertmanager", + }, + "scrapePool": "alertmanager", + "scrapeUrl": "http://demo.prometheus.io:9093/metrics", + "globalUrl": "http://demo.prometheus.io:9093/metrics", + "lastError": "", + "lastScrape": "2026-01-02T13:36:40.200Z", + "lastScrapeDuration": 0.006576866, + "health": "up", + "scrapeInterval": "15s", + "scrapeTimeout": "10s", + }, + }, + "droppedTargets": []map[string]any{}, + "droppedTargetCounts": map[string]int{ + "alertmanager": 0, + "blackbox": 0, + "caddy": 0, + "cadvisor": 0, + "grafana": 0, + "node": 0, + "prometheus": 0, + "random": 0, + }, + }, + }), + }) + + return examples +} + +// rulesResponseExamples returns examples for /rules response. +func rulesResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("ruleGroups", &base.Example{ + Summary: "Alerting and recording rules", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "groups": []map[string]any{ + { + "name": "ansible managed alert rules", + "file": "/etc/prometheus/rules/ansible_managed.yml", + "interval": 15, + "limit": 0, + "rules": []map[string]any{ + { + "state": "firing", + "name": "Watchdog", + "query": "vector(1)", + "duration": 600, + "keepFiringFor": 0, + "labels": map[string]string{"severity": "warning"}, + "annotations": map[string]string{"description": "This is an alert meant to ensure that the entire alerting pipeline is functional. This alert is always firing, therefore it should always be firing in Alertmanager and always fire against a receiver. There are integrations with various notification mechanisms that send a notification when this alert is not firing. 
For example the \"DeadMansSnitch\" integration in PagerDuty.", "summary": "Ensure entire alerting pipeline is functional"}, + "health": "ok", + "evaluationTime": 0.000356688, + "lastEvaluation": "2026-01-02T13:36:56.874Z", + "type": "alerting", + }, + }, + "evaluationTime": 0.000561635, + "lastEvaluation": "2026-01-02T13:36:56.874Z", + }, + }, + }, + }), + }) + + return examples +} + +// alertsResponseExamples returns examples for /alerts response. +func alertsResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("activeAlerts", &base.Example{ + Summary: "Currently active alerts", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "alerts": []map[string]any{ + { + "labels": map[string]string{ + "alertname": "Watchdog", + "severity": "warning", + }, + "annotations": map[string]string{ + "description": "This is an alert meant to ensure that the entire alerting pipeline is functional. This alert is always firing, therefore it should always be firing in Alertmanager and always fire against a receiver. There are integrations with various notification mechanisms that send a notification when this alert is not firing. For example the \"DeadMansSnitch\" integration in PagerDuty.", + "summary": "Ensure entire alerting pipeline is functional", + }, + "state": "firing", + "activeAt": "2026-01-02T13:30:00.000Z", + "value": "1e+00", + }, + }, + }, + }), + }) + + return examples +} + +// queryExemplarsResponseExamples returns examples for /query_exemplars response. +func queryExemplarsResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("exemplarsResult", &base.Example{ + Summary: "Exemplars for a metric with trace IDs", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": []map[string]any{ + { + "seriesLabels": map[string]string{ + "__name__": "http_requests_total", + "job": "api-server", + "method": "GET", + }, + "exemplars": []map[string]any{ + { + "labels": map[string]string{ + "traceID": "abc123def456", + }, + "value": "1.5", + "timestamp": 1689956451.781, + }, + }, + }, + }, + }), + }) + + return examples +} + +// formatQueryResponseExamples returns examples for /format_query response. +func formatQueryResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("formattedQuery", &base.Example{ + Summary: "Formatted PromQL query", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": "sum by(job, status) (rate(http_requests_total[5m]))", + }), + }) + + return examples +} + +// parseQueryResponseExamples returns examples for /parse_query response. +func parseQueryResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("parsedQuery", &base.Example{ + Summary: "Parsed PromQL expression tree", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "resultType": "vector", + }, + }), + }) + + return examples +} + +// labelValuesResponseExamples returns examples for /label/{name}/values response. 
+func labelValuesResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("labelValues", &base.Example{ + Summary: "List of values for a label", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": []string{"alertmanager", "blackbox", "caddy", "cadvisor", "grafana", "node", "prometheus", "random"}, + }), + }) + + return examples +} + +// metadataResponseExamples returns examples for /metadata response. +func metadataResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("metricMetadata", &base.Example{ + Summary: "Metadata for metrics", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string][]map[string]any{ + "prometheus_rule_group_iterations_missed_total": { + { + "type": "counter", + "help": "The total number of rule group evaluations missed due to slow rule group evaluation.", + "unit": "", + }, + }, + "prometheus_sd_updates_total": { + { + "type": "counter", + "help": "Total number of update events sent to the SD consumers.", + "unit": "", + }, + }, + "go_gc_stack_starting_size_bytes": { + { + "type": "gauge", + "help": "The stack size of new goroutines. Sourced from /gc/stack/starting-size:bytes.", + "unit": "", + }, + }, + }, + }), + }) + + return examples +} + +// scrapePoolsResponseExamples returns examples for /scrape_pools response. +func scrapePoolsResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("scrapePoolsList", &base.Example{ + Summary: "List of scrape pool names", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "scrapePools": []string{"alertmanager", "blackbox", "caddy", "cadvisor", "grafana", "node", "prometheus", "random"}, + }, + }), + }) + + return examples +} + +// targetsMetadataResponseExamples returns examples for /targets/metadata response. +func targetsMetadataResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("targetMetadata", &base.Example{ + Summary: "Metadata for targets", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": []map[string]any{ + { + "target": map[string]string{ + "instance": "localhost:9090", + "job": "prometheus", + }, + "type": "gauge", + "help": "The current health status of the target", + "unit": "", + "metric": "up", + }, + }, + }), + }) + + return examples +} + +// targetsRelabelStepsResponseExamples returns examples for /targets/relabel_steps response. +func targetsRelabelStepsResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("relabelSteps", &base.Example{ + Summary: "Relabel steps for a target", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "steps": []map[string]any{ + { + "rule": map[string]any{ + "source_labels": []string{"__address__"}, + "target_label": "instance", + "action": "replace", + "regex": "(.*)", + "replacement": "$1", + }, + "output": map[string]string{ + "__address__": "localhost:9090", + "instance": "localhost:9090", + "job": "prometheus", + }, + "keep": true, + }, + }, + }, + }), + }) + + return examples +} + +// alertmanagersResponseExamples returns examples for /alertmanagers response. 
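The label values payload documented above decodes into a plain string slice on the client side; a small sketch (server address and label name illustrative):

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	resp, err := http.Get("http://localhost:9090/api/v1/label/job/values")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var r struct {
		Status string   `json:"status"`
		Data   []string `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&r); err != nil {
		panic(err)
	}
	// e.g. [alertmanager blackbox caddy cadvisor grafana node prometheus random]
	fmt.Println(r.Data)
}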
+func alertmanagersResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("alertmanagerDiscovery", &base.Example{ + Summary: "Alertmanager discovery results", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "activeAlertmanagers": []map[string]any{ + { + "url": "http://demo.prometheus.io:9093/api/v2/alerts", + }, + }, + "droppedAlertmanagers": []map[string]any{}, + }, + }), + }) + + return examples +} + +// statusConfigResponseExamples returns examples for /status/config response. +func statusConfigResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("configYAML", &base.Example{ + Summary: "Prometheus configuration", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "yaml": "global:\n scrape_interval: 15s\n scrape_timeout: 10s\n evaluation_interval: 15s\n external_labels:\n environment: demo-prometheus-io\nalerting:\n alertmanagers:\n - scheme: http\n static_configs:\n - targets:\n - demo.prometheus.io:9093\nrule_files:\n- /etc/prometheus/rules/*.yml\n", + }, + }), + }) + + return examples +} + +// statusRuntimeInfoResponseExamples returns examples for /status/runtimeinfo response. +func statusRuntimeInfoResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("runtimeInfo", &base.Example{ + Summary: "Runtime information", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "startTime": "2026-01-01T13:37:00.000Z", + "CWD": "/", + "hostname": "demo-prometheus-io", + "serverTime": "2026-01-02T13:37:00.000Z", + "reloadConfigSuccess": true, + "lastConfigTime": "2026-01-01T13:37:00.000Z", + "corruptionCount": 0, + "goroutineCount": 88, + "GOMAXPROCS": 2, + "GOMEMLIMIT": int64(3703818240), + "GOGC": "75", + "GODEBUG": "", + "storageRetention": "31d", + }, + }), + }) + + return examples +} + +// statusBuildInfoResponseExamples returns examples for /status/buildinfo response. +func statusBuildInfoResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("buildInfo", &base.Example{ + Summary: "Build information", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "version": "3.7.3", + "revision": "0a41f0000705c69ab8e0f9a723fc73e39ed62b07", + "branch": "HEAD", + "buildUser": "root@08c890a84441", + "buildDate": "20251030-07:26:10", + "goVersion": "go1.25.3", + }, + }), + }) + + return examples +} + +// statusFlagsResponseExamples returns examples for /status/flags response. 
+func statusFlagsResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("flags", &base.Example{ + Summary: "Command-line flags", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]string{ + "agent": "false", + "alertmanager.notification-queue-capacity": "10000", + "config.file": "/etc/prometheus/prometheus.yml", + "enable-feature": "exemplar-storage,native-histograms", + "query.max-concurrency": "20", + "query.timeout": "2m", + "storage.tsdb.path": "/prometheus", + "storage.tsdb.retention.time": "15d", + "web.console.libraries": "/usr/share/prometheus/console_libraries", + "web.console.templates": "/usr/share/prometheus/consoles", + "web.enable-admin-api": "true", + "web.enable-lifecycle": "true", + "web.listen-address": "0.0.0.0:9090", + "web.page-title": "Prometheus Time Series Collection and Processing Server", + }, + }), + }) + + return examples +} + +// statusTSDBResponseExamples returns examples for /status/tsdb response. +func statusTSDBResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("tsdbStats", &base.Example{ + Summary: "TSDB statistics", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "headStats": map[string]any{ + "numSeries": 9925, + "numLabelPairs": 2512, + "chunkCount": 37525, + "minTime": int64(1767362400712), + "maxTime": int64(1767436620000), + }, + "seriesCountByMetricName": []map[string]any{ + { + "name": "up", + "value": 100, + }, + { + "name": "http_requests_total", + "value": 500, + }, + }, + "labelValueCountByLabelName": []map[string]any{ + { + "name": "__name__", + "value": 5, + }, + { + "name": "job", + "value": 3, + }, + }, + "memoryInBytesByLabelName": []map[string]any{ + { + "name": "__name__", + "value": 1024, + }, + { + "name": "job", + "value": 512, + }, + }, + "seriesCountByLabelValuePair": []map[string]any{ + { + "name": "job=prometheus", + "value": 100, + }, + { + "name": "instance=localhost:9090", + "value": 100, + }, + }, + }, + }), + }) + + return examples +} + +// statusTSDBBlocksResponseExamples returns examples for /status/tsdb/blocks response. +func statusTSDBBlocksResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("tsdbBlocks", &base.Example{ + Summary: "TSDB block information", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "blocks": []map[string]any{ + { + "ulid": "01KC4D6GXQA4CRHYKV78NEBVAE", + "minTime": int64(1764568801099), + "maxTime": int64(1764763200000), + "stats": map[string]any{ + "numSamples": 129505582, + "numSeries": 10661, + "numChunks": 1073962, + }, + "compaction": map[string]any{ + "level": 4, + "sources": []string{ + "01KBCJ7TR8A4QAJ3AA1J651P5S", + "01KBCS3J0E34567YPB8Y5W0E24", + "01KBCZZ9KRTYGG3E7HVQFGC3S3", + }, + }, + "version": 1, + }, + }, + }, + }), + }) + + return examples +} + +// statusWALReplayResponseExamples returns examples for /status/walreplay response. 
+func statusWALReplayResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("walReplay", &base.Example{ + Summary: "WAL replay status", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "min": 3209, + "max": 3214, + "current": 3214, + }, + }), + }) + + return examples +} + +// deleteSeriesResponseExamples returns examples for /admin/tsdb/delete_series response. +func deleteSeriesResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("deletionSuccess", &base.Example{ + Summary: "Successful series deletion", + Value: createYAMLNode(map[string]any{ + "status": "success", + }), + }) + + return examples +} + +// cleanTombstonesResponseExamples returns examples for /admin/tsdb/clean_tombstones response. +func cleanTombstonesResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("tombstonesCleaned", &base.Example{ + Summary: "Tombstones cleaned successfully", + Value: createYAMLNode(map[string]any{ + "status": "success", + }), + }) + + return examples +} + +// seriesDeleteResponseExamples returns examples for DELETE /series response. +func seriesDeleteResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("seriesDeleted", &base.Example{ + Summary: "Series marked for deletion", + Value: createYAMLNode(map[string]any{ + "status": "success", + }), + }) + + return examples +} + +// snapshotResponseExamples returns examples for /admin/tsdb/snapshot response. +func snapshotResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("snapshotCreated", &base.Example{ + Summary: "Snapshot created successfully", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": map[string]any{ + "name": "20260102T133700Z-a1b2c3d4e5f67890", + }, + }), + }) + + return examples +} + +// notificationsResponseExamples returns examples for /notifications response. +func notificationsResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("notifications", &base.Example{ + Summary: "Server notifications", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": []map[string]any{ + { + "text": "Configuration reload has failed.", + "date": "2026-01-02T16:14:50.046Z", + "active": true, + }, + }, + }), + }) + + return examples +} + +// notificationLiveExamples provides example SSE messages for the live notifications endpoint. +func notificationLiveExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("activeNotification", &base.Example{ + Summary: "Active notification SSE message", + Description: "An SSE message containing an active server notification.", + Value: createYAMLNode(map[string]any{ + "data": "{\"text\":\"Configuration reload has failed.\",\"date\":\"2026-01-02T16:14:50.046Z\",\"active\":true}", + }), + }) + + return examples +} + +// featuresResponseExamples returns examples for /features response. 
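The SSE example above carries one JSON notification per "data:" line; a rough consumer sketch for that stream, with the field names taken from the example payload (server address illustrative, standard library only):

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
	"time"
)

func main() {
	resp, err := http.Get("http://localhost:9090/api/v1/notifications/live")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if !strings.HasPrefix(line, "data:") {
			continue
		}
		// Each data field holds a JSON-encoded notification, as in the example above.
		var n struct {
			Text   string    `json:"text"`
			Date   time.Time `json:"date"`
			Active bool      `json:"active"`
		}
		payload := strings.TrimSpace(strings.TrimPrefix(line, "data:"))
		if err := json.Unmarshal([]byte(payload), &n); err != nil {
			continue
		}
		fmt.Printf("%s active=%t: %s\n", n.Date.Format(time.RFC3339), n.Active, n.Text)
	}
}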
+func featuresResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("enabledFeatures", &base.Example{ + Summary: "Enabled feature flags", + Value: createYAMLNode(map[string]any{ + "status": "success", + "data": []string{"exemplar-storage", "remote-write-receiver"}, + }), + }) + + return examples +} + +// errorResponseExamples returns examples for error responses. +func errorResponseExamples() *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + + examples.Set("tsdbNotReady", &base.Example{ + Summary: "TSDB not ready", + Value: createYAMLNode(map[string]any{ + "status": "error", + "errorType": "internal", + "error": "TSDB not ready", + }), + }) + + return examples +} diff --git a/web/api/v1/openapi_golden_test.go b/web/api/v1/openapi_golden_test.go new file mode 100644 index 0000000000..6207fda81b --- /dev/null +++ b/web/api/v1/openapi_golden_test.go @@ -0,0 +1,176 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1 + +import ( + "flag" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + "github.com/prometheus/prometheus/web/api/testhelpers" +) + +var updateOpenAPISpec = flag.Bool("update-openapi-spec", false, "update openapi golden files with the current specs") + +// TestOpenAPIGolden_3_1 verifies that the OpenAPI 3.1 spec matches the golden file. +func TestOpenAPIGolden_3_1(t *testing.T) { + // Create an API instance to serve the OpenAPI spec. + api := newTestAPI(t, testhelpers.APIConfig{}) + + // Fetch the OpenAPI 3.1 spec from the API (default, no query param). + resp := testhelpers.GET(t, api, "/api/v1/openapi.yaml") + require.Equal(t, 200, resp.StatusCode, "expected HTTP 200 for OpenAPI spec endpoint") + require.NotEmpty(t, resp.Body, "OpenAPI spec should not be empty") + + goldenPath := filepath.Join("testdata", "openapi_3.1_golden.yaml") + + if *updateOpenAPISpec { + // Update mode: write the current spec to the golden file. + t.Logf("Updating golden file: %s", goldenPath) + + // Ensure the testdata directory exists. + err := os.MkdirAll(filepath.Dir(goldenPath), 0o755) + require.NoError(t, err, "failed to create testdata directory") + + // Write the golden file. + err = os.WriteFile(goldenPath, []byte(resp.Body), 0o644) + require.NoError(t, err, "failed to write golden file") + + t.Logf("Golden file updated successfully") + return + } + + // Comparison mode: verify the spec matches the golden file. + goldenData, err := os.ReadFile(goldenPath) + require.NoError(t, err, "failed to read golden file (run with -update-openapi-spec to generate it)") + + require.Equal(t, string(goldenData), resp.Body, + "OpenAPI 3.1 spec does not match golden file. Run 'go test -update-openapi-spec' to update.") + + // Verify version field is 3.1.0. 
+ var spec map[string]any + err = yaml.Unmarshal([]byte(resp.Body), &spec) + require.NoError(t, err) + require.Equal(t, "3.1.0", spec["openapi"], "OpenAPI version should be 3.1.0") + + // Verify /notifications/live is NOT present in 3.1 spec. + paths := spec["paths"].(map[string]any) + _, found := paths["/notifications/live"] + require.False(t, found, "/notifications/live should not be in OpenAPI 3.1 spec") +} + +// TestOpenAPIGolden_3_2 verifies that the OpenAPI 3.2 spec matches the golden file. +func TestOpenAPIGolden_3_2(t *testing.T) { + // Create an API instance to serve the OpenAPI spec. + api := newTestAPI(t, testhelpers.APIConfig{}) + + // Fetch the OpenAPI 3.2 spec from the API with query parameter. + resp := testhelpers.GET(t, api, "/api/v1/openapi.yaml?openapi_version=3.2") + require.Equal(t, 200, resp.StatusCode, "expected HTTP 200 for OpenAPI spec endpoint") + require.NotEmpty(t, resp.Body, "OpenAPI spec should not be empty") + + goldenPath := filepath.Join("testdata", "openapi_3.2_golden.yaml") + + if *updateOpenAPISpec { + // Update mode: write the current spec to the golden file. + t.Logf("Updating golden file: %s", goldenPath) + + // Ensure the testdata directory exists. + err := os.MkdirAll(filepath.Dir(goldenPath), 0o755) + require.NoError(t, err, "failed to create testdata directory") + + // Write the golden file. + err = os.WriteFile(goldenPath, []byte(resp.Body), 0o644) + require.NoError(t, err, "failed to write golden file") + + t.Logf("Golden file updated successfully") + return + } + + // Comparison mode: verify the spec matches the golden file. + goldenData, err := os.ReadFile(goldenPath) + require.NoError(t, err, "failed to read golden file (run with -update-openapi-spec to generate it)") + + require.Equal(t, string(goldenData), resp.Body, + "OpenAPI 3.2 spec does not match golden file. Run 'go test -update-openapi-spec' to update.") + + // Verify version field is 3.2.0. + var spec map[string]any + err = yaml.Unmarshal([]byte(resp.Body), &spec) + require.NoError(t, err) + require.Equal(t, "3.2.0", spec["openapi"], "OpenAPI version should be 3.2.0") + + // Verify /notifications/live IS present in 3.2 spec. + paths := spec["paths"].(map[string]any) + _, found := paths["/notifications/live"] + require.True(t, found, "/notifications/live should be in OpenAPI 3.2 spec") +} + +// TestOpenAPIVersionSelection verifies version query parameter handling. 
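Outside the test suite, a downstream project could load the served document with the same libopenapi library used in this change, roughly as in the sketch below (URL and error handling illustrative; assumes libopenapi's NewDocument/BuildV3Model entry points):

package main

import (
	"fmt"
	"io"
	"net/http"

	"github.com/pb33f/libopenapi"
)

func main() {
	resp, err := http.Get("http://localhost:9090/api/v1/openapi.yaml?openapi_version=3.2")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	spec, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}

	doc, err := libopenapi.NewDocument(spec)
	if err != nil {
		panic(err)
	}
	model, errs := doc.BuildV3Model()
	if len(errs) > 0 {
		panic(errs[0])
	}
	// Prints the spec title and the "openapi" version field (3.1.0 or 3.2.0).
	fmt.Println(model.Model.Info.Title, model.Model.Version)
}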
+func TestOpenAPIVersionSelection(t *testing.T) { + api := newTestAPI(t, testhelpers.APIConfig{}) + + tests := []struct { + name string + url string + expectedVersion string + expectLivePath bool + }{ + { + name: "default to 3.1.0", + url: "/api/v1/openapi.yaml", + expectedVersion: "3.1.0", + expectLivePath: false, + }, + { + name: "explicit 3.1", + url: "/api/v1/openapi.yaml?openapi_version=3.1", + expectedVersion: "3.1.0", + expectLivePath: false, + }, + { + name: "explicit 3.2", + url: "/api/v1/openapi.yaml?openapi_version=3.2", + expectedVersion: "3.2.0", + expectLivePath: true, + }, + { + name: "invalid version defaults to 3.1.0", + url: "/api/v1/openapi.yaml?openapi_version=4.0", + expectedVersion: "3.1.0", + expectLivePath: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + resp := testhelpers.GET(t, api, tc.url) + require.Equal(t, 200, resp.StatusCode) + + var spec map[string]any + err := yaml.Unmarshal([]byte(resp.Body), &spec) + require.NoError(t, err) + + require.Equal(t, tc.expectedVersion, spec["openapi"]) + + paths := spec["paths"].(map[string]any) + _, found := paths["/notifications/live"] + require.Equal(t, tc.expectLivePath, found) + }) + } +} diff --git a/web/api/v1/openapi_helpers.go b/web/api/v1/openapi_helpers.go new file mode 100644 index 0000000000..76f6001693 --- /dev/null +++ b/web/api/v1/openapi_helpers.go @@ -0,0 +1,343 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1 + +import ( + "time" + + jsoniter "github.com/json-iterator/go" + "github.com/pb33f/libopenapi/datamodel/high/base" + v3 "github.com/pb33f/libopenapi/datamodel/high/v3" + "github.com/pb33f/libopenapi/orderedmap" + yaml "go.yaml.in/yaml/v4" + + "github.com/prometheus/prometheus/promql" +) + +// Helper functions for building common structures. + +// exampleTime is a reference time used for timestamp examples. +var exampleTime = time.Date(2026, 1, 2, 13, 37, 0, 0, time.UTC) + +func boolPtr(b bool) *bool { + return &b +} + +func int64Ptr(i int64) *int64 { + return &i +} + +type example struct { + name string + value any +} + +// exampleMap creates an Examples map from the provided examples. 
+func exampleMap(exs []example) *orderedmap.Map[string, *base.Example] { + examples := orderedmap.New[string, *base.Example]() + for _, ex := range exs { + examples.Set(ex.name, &base.Example{ + Value: createYAMLNode(ex.value), + }) + } + return examples +} + +func schemaRef(ref string) *base.SchemaProxy { + return base.CreateSchemaProxyRef(ref) +} + +func schemaFromType(t string) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{Type: []string{t}}) +} + +func stringSchema() *base.SchemaProxy { + return schemaFromType("string") +} + +func integerSchema() *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"integer"}, + Format: "int64", + }) +} + +func stringSchemaWithDescription(description string) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"string"}, + Description: description, + }) +} + +func stringSchemaWithDescriptionAndExample(description string, example any) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"string"}, + Description: description, + Example: createYAMLNode(example), + }) +} + +func integerSchemaWithDescription(description string) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"integer"}, + Format: "int64", + Description: description, + }) +} + +func integerSchemaWithDescriptionAndExample(description string, example any) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"integer"}, + Format: "int64", + Description: description, + Example: createYAMLNode(example), + }) +} + +func stringArraySchemaWithDescription(description string) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + Description: description, + }) +} + +func stringArraySchemaWithDescriptionAndExample(description string, example any) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + Description: description, + Example: createYAMLNode(example), + }) +} + +func statusSchema() *base.SchemaProxy { + successNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "success"} + errorNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "error"} + exampleNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "success"} + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"string"}, + Enum: []*yaml.Node{successNode, errorNode}, + Description: "Response status.", + Example: exampleNode, + }) +} + +func warningsSchema() *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + Description: "Only set if there were warnings while executing the request. 
There will still be data in the data field.", + }) +} + +func infosSchema() *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + Description: "Only set if there were info-level annotations while executing the request.", + }) +} + +func timestampSchema() *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + OneOf: []*base.SchemaProxy{ + base.CreateSchemaProxy(&base.Schema{ + Type: []string{"string"}, + Format: "date-time", + Description: "RFC3339 timestamp.", + }), + base.CreateSchemaProxy(&base.Schema{ + Type: []string{"number"}, + Format: "unixtime", + Description: "Unix timestamp in seconds.", + }), + }, + Description: "Timestamp in RFC3339 format or Unix timestamp in seconds.", + }) +} + +func stringSchemaWithConstValue(value string) *base.SchemaProxy { + node := &yaml.Node{Kind: yaml.ScalarNode, Value: value} + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"string"}, + Enum: []*yaml.Node{node}, + }) +} + +func dateTimeSchemaWithDescription(description string) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"string"}, + Format: "date-time", + Description: description, + }) +} + +func numberSchemaWithDescription(description string) *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"number"}, + Format: "double", + Description: description, + }) +} + +func errorResponse() *v3.Response { + content := orderedmap.New[string, *v3.MediaType]() + content.Set("application/json", &v3.MediaType{ + Schema: schemaRef("#/components/schemas/Error"), + }) + return &v3.Response{ + Description: "Error", + Content: content, + } +} + +func noContentResponse() *v3.Response { + return &v3.Response{Description: "No Content"} +} + +func responsesNoContent() *v3.Responses { + codes := orderedmap.New[string, *v3.Response]() + codes.Set("204", noContentResponse()) + codes.Set("default", errorResponse()) + return &v3.Responses{Codes: codes} +} + +func pathParam(name, description string, schema *base.SchemaProxy) *v3.Parameter { + return &v3.Parameter{ + Name: name, + In: "path", + Description: description, + Required: boolPtr(true), + Schema: schema, + } +} + +// createYAMLNode converts Go data to yaml.Node for use in examples. +func createYAMLNode(data any) *yaml.Node { + node := &yaml.Node{} + bytes, _ := yaml.Marshal(data) + _ = yaml.Unmarshal(bytes, node) + return node +} + +// formRequestBodyWithExamples creates a form-encoded request body with examples. +func formRequestBodyWithExamples(schemaRef string, examples *orderedmap.Map[string, *base.Example], description string) *v3.RequestBody { + content := orderedmap.New[string, *v3.MediaType]() + mediaType := &v3.MediaType{ + Schema: base.CreateSchemaProxyRef("#/components/schemas/" + schemaRef), + } + if examples != nil { + mediaType.Examples = examples + } + content.Set("application/x-www-form-urlencoded", mediaType) + return &v3.RequestBody{ + Required: boolPtr(true), + Description: description, + Content: content, + } +} + +// jsonResponseWithExamples creates a JSON response with examples. 
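The two forms accepted by timestampSchema above (RFC3339 or Unix seconds) can be exercised from a client like this sketch (server address illustrative, standard library only):

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strconv"
	"time"
)

func main() {
	at := time.Date(2026, 1, 2, 13, 37, 0, 0, time.UTC)

	// Both encodings are accepted for the "time" parameter.
	for _, ts := range []string{at.Format(time.RFC3339), strconv.FormatInt(at.Unix(), 10)} {
		q := url.Values{"query": {"up"}, "time": {ts}}
		resp, err := http.Get("http://localhost:9090/api/v1/query?" + q.Encode())
		if err != nil {
			panic(err)
		}
		body, _ := io.ReadAll(resp.Body)
		resp.Body.Close()
		fmt.Printf("time=%s status=%d bytes=%d\n", ts, resp.StatusCode, len(body))
	}
}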
+func jsonResponseWithExamples(schemaRef string, examples *orderedmap.Map[string, *base.Example], description string) *v3.Response { + content := orderedmap.New[string, *v3.MediaType]() + mediaType := &v3.MediaType{ + Schema: base.CreateSchemaProxyRef("#/components/schemas/" + schemaRef), + } + if examples != nil { + mediaType.Examples = examples + } + content.Set("application/json", mediaType) + return &v3.Response{ + Description: description, + Content: content, + } +} + +// responsesWithErrorExamples creates responses with both success and error examples. +func responsesWithErrorExamples(okSchemaRef string, successExamples, errorExamples *orderedmap.Map[string, *base.Example], successDescription, errorDescription string) *v3.Responses { + codes := orderedmap.New[string, *v3.Response]() + codes.Set("200", jsonResponseWithExamples(okSchemaRef, successExamples, successDescription)) + codes.Set("default", jsonResponseWithExamples("Error", errorExamples, errorDescription)) + return &v3.Responses{Codes: codes} +} + +// timestampExamples returns examples for timestamp parameters (RFC3339 and epoch). +func timestampExamples(t time.Time) []example { + return []example{ + {"RFC3339", t.Format(time.RFC3339Nano)}, + {"epoch", t.Unix()}, + } +} + +// queryParamWithExample creates a query parameter with examples. +func queryParamWithExample(name, description string, required bool, schema *base.SchemaProxy, examples []example) *v3.Parameter { + param := &v3.Parameter{ + Name: name, + In: "query", + Description: description, + Required: &required, + Explode: boolPtr(false), + Schema: schema, + } + if len(examples) > 0 { + param.Examples = exampleMap(examples) + } + return param +} + +// marshalToYAMLNode marshals a value using jsoniter (production marshaling) and converts to yaml.Node. +// The result is an inline JSON representation that preserves integer types for timestamps. +func marshalToYAMLNode(v any) *yaml.Node { + jsonAPI := jsoniter.ConfigCompatibleWithStandardLibrary + jsonBytes, err := jsonAPI.Marshal(v) + if err != nil { + panic(err) + } + node := &yaml.Node{} + if err := yaml.Unmarshal(jsonBytes, node); err != nil { + panic(err) + } + return node +} + +// vectorExample creates an example for a vector query response using production marshaling. +func vectorExample(v promql.Vector) *yaml.Node { + type response struct { + Status string `json:"status"` + Data struct { + ResultType string `json:"resultType"` + Result promql.Vector `json:"result"` + } `json:"data"` + } + resp := response{Status: "success"} + resp.Data.ResultType = "vector" + resp.Data.Result = v + return marshalToYAMLNode(resp) +} + +// matrixExample creates an example for a matrix query response using production marshaling. +func matrixExample(m promql.Matrix) *yaml.Node { + type response struct { + Status string `json:"status"` + Data struct { + ResultType string `json:"resultType"` + Result promql.Matrix `json:"result"` + } `json:"data"` + } + resp := response{Status: "success"} + resp.Data.ResultType = "matrix" + resp.Data.Result = m + return marshalToYAMLNode(resp) +} diff --git a/web/api/v1/openapi_paths.go b/web/api/v1/openapi_paths.go new file mode 100644 index 0000000000..2f5ab592f7 --- /dev/null +++ b/web/api/v1/openapi_paths.go @@ -0,0 +1,626 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file defines all API path specifications including parameters, request bodies, +// and response schemas. Each path definition corresponds to an endpoint registered in api.go. +package v1 + +import ( + "time" + + "github.com/pb33f/libopenapi/datamodel/high/base" + v3 "github.com/pb33f/libopenapi/datamodel/high/v3" + "github.com/pb33f/libopenapi/orderedmap" +) + +// Path definition methods for API endpoints. + +func (*OpenAPIBuilder) queryPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("limit", "The maximum number of metrics to return.", false, integerSchema(), []example{{"example", 100}}), + queryParamWithExample("time", "The evaluation timestamp (optional, defaults to current time).", false, timestampSchema(), timestampExamples(exampleTime)), + queryParamWithExample("query", "The PromQL query to execute.", true, stringSchema(), []example{{"example", "up"}}), + queryParamWithExample("timeout", "Evaluation timeout. Optional. Defaults to and is capped by the value of the -query.timeout flag.", false, stringSchema(), []example{{"example", "30s"}}), + queryParamWithExample("lookback_delta", "Override the lookback period for this query. Optional.", false, stringSchema(), []example{{"example", "5m"}}), + queryParamWithExample("stats", "When provided, include query statistics in the response. The special value 'all' enables more comprehensive statistics.", false, stringSchema(), []example{{"example", "all"}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "query", + Summary: "Evaluate an instant query", + Tags: []string{"query"}, + Parameters: params, + Responses: responsesWithErrorExamples("QueryOutputBody", queryResponseExamples(), errorResponseExamples(), "Query executed successfully.", "Error executing query."), + }, + Post: &v3.Operation{ + OperationId: "query-post", + Summary: "Evaluate an instant query", + Tags: []string{"query"}, + RequestBody: formRequestBodyWithExamples("QueryPostInputBody", queryPostExamples(), "Submit an instant query. This endpoint accepts the same parameters as the GET version."), + Responses: responsesWithErrorExamples("QueryOutputBody", queryResponseExamples(), errorResponseExamples(), "Instant query executed successfully.", "Error executing instant query."), + }, + } +} + +func (*OpenAPIBuilder) queryRangePath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("limit", "The maximum number of metrics to return.", false, integerSchema(), []example{{"example", 100}}), + queryParamWithExample("start", "The start time of the query.", true, timestampSchema(), timestampExamples(exampleTime.Add(-1*time.Hour))), + queryParamWithExample("end", "The end time of the query.", true, timestampSchema(), timestampExamples(exampleTime)), + queryParamWithExample("step", "The step size of the query.", true, stringSchema(), []example{{"example", "15s"}}), + queryParamWithExample("query", "The query to execute.", true, stringSchema(), []example{{"example", "rate(prometheus_http_requests_total{handler=\"/api/v1/query\"}[5m])"}}), + queryParamWithExample("timeout", "Evaluation timeout. Optional. 
Defaults to and is capped by the value of the -query.timeout flag.", false, stringSchema(), []example{{"example", "30s"}}), + queryParamWithExample("lookback_delta", "Override the lookback period for this query. Optional.", false, stringSchema(), []example{{"example", "5m"}}), + queryParamWithExample("stats", "When provided, include query statistics in the response. The special value 'all' enables more comprehensive statistics.", false, stringSchema(), []example{{"example", "all"}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "query-range", + Summary: "Evaluate a range query", + Tags: []string{"query"}, + Parameters: params, + Responses: responsesWithErrorExamples("QueryRangeOutputBody", queryRangeResponseExamples(), errorResponseExamples(), "Range query executed successfully.", "Error executing range query."), + }, + Post: &v3.Operation{ + OperationId: "query-range-post", + Summary: "Evaluate a range query", + Tags: []string{"query"}, + RequestBody: formRequestBodyWithExamples("QueryRangePostInputBody", queryRangePostExamples(), "Submit a range query. This endpoint accepts the same parameters as the GET version."), + Responses: responsesWithErrorExamples("QueryRangeOutputBody", queryRangeResponseExamples(), errorResponseExamples(), "Range query executed successfully.", "Error executing range query."), + }, + } +} + +func (*OpenAPIBuilder) queryExemplarsPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("start", "Start timestamp for exemplars query.", false, timestampSchema(), timestampExamples(exampleTime.Add(-1*time.Hour))), + queryParamWithExample("end", "End timestamp for exemplars query.", false, timestampSchema(), timestampExamples(exampleTime)), + queryParamWithExample("query", "PromQL query to extract exemplars for.", true, stringSchema(), []example{{"example", "prometheus_http_requests_total"}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "query-exemplars", + Summary: "Query exemplars", + Tags: []string{"query"}, + Parameters: params, + Responses: responsesWithErrorExamples("QueryExemplarsOutputBody", queryExemplarsResponseExamples(), errorResponseExamples(), "Exemplars retrieved successfully.", "Error retrieving exemplars."), + }, + Post: &v3.Operation{ + OperationId: "query-exemplars-post", + Summary: "Query exemplars", + Tags: []string{"query"}, + RequestBody: formRequestBodyWithExamples("QueryExemplarsPostInputBody", queryExemplarsPostExamples(), "Submit an exemplars query. 
This endpoint accepts the same parameters as the GET version."), + Responses: responsesWithErrorExamples("QueryExemplarsOutputBody", queryExemplarsResponseExamples(), errorResponseExamples(), "Exemplars query completed successfully.", "Error processing exemplars query."), + }, + } +} + +func (*OpenAPIBuilder) formatQueryPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("query", "PromQL expression to format.", true, stringSchema(), []example{{"example", "sum(rate(http_requests_total[5m])) by (job)"}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "format-query", + Summary: "Format a PromQL query", + Tags: []string{"query"}, + Parameters: params, + Responses: responsesWithErrorExamples("FormatQueryOutputBody", formatQueryResponseExamples(), errorResponseExamples(), "Query formatted successfully.", "Error formatting query."), + }, + Post: &v3.Operation{ + OperationId: "format-query-post", + Summary: "Format a PromQL query", + Tags: []string{"query"}, + RequestBody: formRequestBodyWithExamples("FormatQueryPostInputBody", formatQueryPostExamples(), "Submit a PromQL query to format. This endpoint accepts the same parameters as the GET version."), + Responses: responsesWithErrorExamples("FormatQueryOutputBody", formatQueryResponseExamples(), errorResponseExamples(), "Query formatting completed successfully.", "Error formatting query."), + }, + } +} + +func (*OpenAPIBuilder) parseQueryPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("query", "PromQL expression to parse.", true, stringSchema(), []example{{"example", "up{job=\"prometheus\"}"}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "parse-query", + Summary: "Parse a PromQL query", + Tags: []string{"query"}, + Parameters: params, + Responses: responsesWithErrorExamples("ParseQueryOutputBody", parseQueryResponseExamples(), errorResponseExamples(), "Query parsed successfully.", "Error parsing query."), + }, + Post: &v3.Operation{ + OperationId: "parse-query-post", + Summary: "Parse a PromQL query", + Tags: []string{"query"}, + RequestBody: formRequestBodyWithExamples("ParseQueryPostInputBody", parseQueryPostExamples(), "Submit a PromQL query to parse. 
This endpoint accepts the same parameters as the GET version."), + Responses: responsesWithErrorExamples("ParseQueryOutputBody", parseQueryResponseExamples(), errorResponseExamples(), "Query parsed successfully via POST.", "Error parsing query via POST."), + }, + } +} + +func (*OpenAPIBuilder) labelsPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("start", "Start timestamp for label names query.", false, timestampSchema(), timestampExamples(exampleTime.Add(-1*time.Hour))), + queryParamWithExample("end", "End timestamp for label names query.", false, timestampSchema(), timestampExamples(exampleTime)), + queryParamWithExample("match[]", "Series selector argument.", false, base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + }), []example{{"example", []string{"{job=\"prometheus\"}"}}}), + queryParamWithExample("limit", "Maximum number of label names to return.", false, integerSchema(), []example{{"example", 100}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "labels", + Summary: "Get label names", + Tags: []string{"labels"}, + Parameters: params, + Responses: responsesWithErrorExamples("LabelsOutputBody", labelsResponseExamples(), errorResponseExamples(), "Label names retrieved successfully.", "Error retrieving label names."), + }, + Post: &v3.Operation{ + OperationId: "labels-post", + Summary: "Get label names", + Tags: []string{"labels"}, + RequestBody: formRequestBodyWithExamples("LabelsPostInputBody", labelsPostExamples(), "Submit a label names query. This endpoint accepts the same parameters as the GET version."), + Responses: responsesWithErrorExamples("LabelsOutputBody", labelsResponseExamples(), errorResponseExamples(), "Label names retrieved successfully via POST.", "Error retrieving label names via POST."), + }, + } +} + +func (*OpenAPIBuilder) labelValuesPath() *v3.PathItem { + params := []*v3.Parameter{ + pathParam("name", "Label name.", stringSchema()), + queryParamWithExample("start", "Start timestamp for label values query.", false, timestampSchema(), timestampExamples(exampleTime.Add(-1*time.Hour))), + queryParamWithExample("end", "End timestamp for label values query.", false, timestampSchema(), timestampExamples(exampleTime)), + queryParamWithExample("match[]", "Series selector argument.", false, base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + }), []example{{"example", []string{"{job=\"prometheus\"}"}}}), + queryParamWithExample("limit", "Maximum number of label values to return.", false, integerSchema(), []example{{"example", 1000}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "label-values", + Summary: "Get label values", + Tags: []string{"labels"}, + Parameters: params, + Responses: responsesWithErrorExamples("LabelValuesOutputBody", labelValuesResponseExamples(), errorResponseExamples(), "Label values retrieved successfully.", "Error retrieving label values."), + }, + } +} + +func (*OpenAPIBuilder) seriesPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("start", "Start timestamp for series query.", false, timestampSchema(), timestampExamples(exampleTime.Add(-1*time.Hour))), + queryParamWithExample("end", "End timestamp for series query.", false, timestampSchema(), timestampExamples(exampleTime)), + queryParamWithExample("match[]", "Series selector argument.", true, base.CreateSchemaProxy(&base.Schema{ + 
Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + }), []example{{"example", []string{"{job=\"prometheus\"}"}}}), + queryParamWithExample("limit", "Maximum number of series to return.", false, integerSchema(), []example{{"example", 100}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "series", + Summary: "Find series by label matchers", + Tags: []string{"series"}, + Parameters: params, + Responses: responsesWithErrorExamples("SeriesOutputBody", seriesResponseExamples(), errorResponseExamples(), "Series returned matching the provided label matchers.", "Error retrieving series."), + }, + Post: &v3.Operation{ + OperationId: "series-post", + Summary: "Find series by label matchers", + Tags: []string{"series"}, + RequestBody: formRequestBodyWithExamples("SeriesPostInputBody", seriesPostExamples(), "Submit a series query. This endpoint accepts the same parameters as the GET version."), + Responses: responsesWithErrorExamples("SeriesOutputBody", seriesResponseExamples(), errorResponseExamples(), "Series returned matching the provided label matchers via POST.", "Error retrieving series via POST."), + }, + Delete: &v3.Operation{ + OperationId: "delete-series", + Summary: "Delete series", + Description: "Delete series matching selectors. Note: This is deprecated, use POST /admin/tsdb/delete_series instead.", + Tags: []string{"series"}, + Responses: responsesWithErrorExamples("SeriesDeleteOutputBody", seriesDeleteResponseExamples(), errorResponseExamples(), "Series marked for deletion.", "Error deleting series."), + }, + } +} + +func (*OpenAPIBuilder) metadataPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("limit", "The maximum number of metrics to return.", false, integerSchema(), []example{{"example", 100}}), + queryParamWithExample("limit_per_metric", "The maximum number of metadata entries per metric.", false, integerSchema(), []example{{"example", 10}}), + queryParamWithExample("metric", "A metric name to filter metadata for.", false, stringSchema(), []example{{"example", "http_requests_total"}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-metadata", + Summary: "Get metadata", + Tags: []string{"metadata"}, + Parameters: params, + Responses: responsesWithErrorExamples("MetadataOutputBody", metadataResponseExamples(), errorResponseExamples(), "Metric metadata retrieved successfully.", "Error retrieving metadata."), + }, + } +} + +func (*OpenAPIBuilder) scrapePoolsPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-scrape-pools", + Summary: "Get scrape pools", + Tags: []string{"targets"}, + Responses: responsesWithErrorExamples("ScrapePoolsOutputBody", scrapePoolsResponseExamples(), errorResponseExamples(), "Scrape pools retrieved successfully.", "Error retrieving scrape pools."), + }, + } +} + +func (*OpenAPIBuilder) targetsPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("scrapePool", "Filter targets by scrape pool name.", false, stringSchema(), []example{{"example", "prometheus"}}), + queryParamWithExample("state", "Filter by state: active, dropped, or any.", false, stringSchema(), []example{{"example", "active"}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-targets", + Summary: "Get targets", + Tags: []string{"targets"}, + Parameters: params, + Responses: responsesWithErrorExamples("TargetsOutputBody", targetsResponseExamples(), errorResponseExamples(), "Target discovery information 
retrieved successfully.", "Error retrieving targets."), + }, + } +} + +func (*OpenAPIBuilder) targetsMetadataPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("match_target", "Label selector to filter targets.", false, stringSchema(), []example{{"example", "{job=\"prometheus\"}"}}), + queryParamWithExample("metric", "Metric name to retrieve metadata for.", false, stringSchema(), []example{{"example", "http_requests_total"}}), + queryParamWithExample("limit", "Maximum number of targets to match.", false, integerSchema(), []example{{"example", 10}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-targets-metadata", + Summary: "Get targets metadata", + Tags: []string{"targets"}, + Parameters: params, + Responses: responsesWithErrorExamples("TargetMetadataOutputBody", targetsMetadataResponseExamples(), errorResponseExamples(), "Target metadata retrieved successfully.", "Error retrieving target metadata."), + }, + } +} + +func (*OpenAPIBuilder) targetsRelabelStepsPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("scrapePool", "Name of the scrape pool.", true, stringSchema(), []example{{"example", "prometheus"}}), + queryParamWithExample("labels", "JSON-encoded labels to apply relabel rules to.", true, stringSchema(), []example{{"example", "{\"__address__\":\"localhost:9090\",\"job\":\"prometheus\"}"}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-targets-relabel-steps", + Summary: "Get targets relabel steps", + Tags: []string{"targets"}, + Parameters: params, + Responses: responsesWithErrorExamples("TargetRelabelStepsOutputBody", targetsRelabelStepsResponseExamples(), errorResponseExamples(), "Relabel steps retrieved successfully.", "Error retrieving relabel steps."), + }, + } +} + +func (*OpenAPIBuilder) rulesPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("type", "Filter by rule type: alert or record.", false, stringSchema(), []example{{"example", "alert"}}), + queryParamWithExample("rule_name[]", "Filter by rule name.", false, base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + }), []example{{"example", []string{"HighErrorRate"}}}), + queryParamWithExample("rule_group[]", "Filter by rule group name.", false, base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + }), []example{{"example", []string{"example_alerts"}}}), + queryParamWithExample("file[]", "Filter by file path.", false, base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + }), []example{{"example", []string{"/etc/prometheus/rules.yml"}}}), + queryParamWithExample("match[]", "Label matchers to filter rules.", false, base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + }), []example{{"example", []string{"{severity=\"critical\"}"}}}), + queryParamWithExample("exclude_alerts", "Exclude active alerts from response.", false, stringSchema(), []example{{"example", "false"}}), + queryParamWithExample("group_limit", "Maximum number of rule groups to return.", false, integerSchema(), []example{{"example", 100}}), + queryParamWithExample("group_next_token", "Pagination token for next page.", false, stringSchema(), []example{{"example", "abc123"}}), + } + return &v3.PathItem{ + Get: 
&v3.Operation{ + OperationId: "rules", + Summary: "Get alerting and recording rules", + Tags: []string{"rules"}, + Parameters: params, + Responses: responsesWithErrorExamples("RulesOutputBody", rulesResponseExamples(), errorResponseExamples(), "Rules retrieved successfully.", "Error retrieving rules."), + }, + } +} + +func (*OpenAPIBuilder) alertsPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "alerts", + Summary: "Get active alerts", + Tags: []string{"alerts"}, + Responses: responsesWithErrorExamples("AlertsOutputBody", alertsResponseExamples(), errorResponseExamples(), "Active alerts retrieved successfully.", "Error retrieving alerts."), + }, + } +} + +func (*OpenAPIBuilder) alertmanagersPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "alertmanagers", + Summary: "Get Alertmanager discovery", + Tags: []string{"alerts"}, + Responses: responsesWithErrorExamples("AlertmanagersOutputBody", alertmanagersResponseExamples(), errorResponseExamples(), "Alertmanager targets retrieved successfully.", "Error retrieving Alertmanager targets."), + }, + } +} + +func (*OpenAPIBuilder) statusConfigPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-status-config", + Summary: "Get status config", + Tags: []string{"status"}, + Responses: responsesWithErrorExamples("StatusConfigOutputBody", statusConfigResponseExamples(), errorResponseExamples(), "Configuration retrieved successfully.", "Error retrieving configuration."), + }, + } +} + +func (*OpenAPIBuilder) statusRuntimeInfoPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-status-runtimeinfo", + Summary: "Get status runtimeinfo", + Tags: []string{"status"}, + Responses: responsesWithErrorExamples("StatusRuntimeInfoOutputBody", statusRuntimeInfoResponseExamples(), errorResponseExamples(), "Runtime information retrieved successfully.", "Error retrieving runtime information."), + }, + } +} + +func (*OpenAPIBuilder) statusBuildInfoPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-status-buildinfo", + Summary: "Get status buildinfo", + Tags: []string{"status"}, + Responses: responsesWithErrorExamples("StatusBuildInfoOutputBody", statusBuildInfoResponseExamples(), errorResponseExamples(), "Build information retrieved successfully.", "Error retrieving build information."), + }, + } +} + +func (*OpenAPIBuilder) statusFlagsPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-status-flags", + Summary: "Get status flags", + Tags: []string{"status"}, + Responses: responsesWithErrorExamples("StatusFlagsOutputBody", statusFlagsResponseExamples(), errorResponseExamples(), "Command-line flags retrieved successfully.", "Error retrieving flags."), + }, + } +} + +func (*OpenAPIBuilder) statusTSDBPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("limit", "The maximum number of items to return per category.", false, integerSchema(), []example{{"example", 10}}), + } + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "status-tsdb", + Summary: "Get TSDB status", + Tags: []string{"status"}, + Parameters: params, + Responses: responsesWithErrorExamples("StatusTSDBOutputBody", statusTSDBResponseExamples(), errorResponseExamples(), "TSDB status retrieved successfully.", "Error retrieving TSDB status."), + }, + } +} + +func (*OpenAPIBuilder) statusTSDBBlocksPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: 
"status-tsdb-blocks", + Summary: "Get TSDB blocks information", + Tags: []string{"status"}, + Responses: responsesWithErrorExamples("StatusTSDBBlocksOutputBody", statusTSDBBlocksResponseExamples(), errorResponseExamples(), "TSDB blocks information retrieved successfully.", "Error retrieving TSDB blocks."), + }, + } +} + +func (*OpenAPIBuilder) statusWALReplayPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-status-walreplay", + Summary: "Get status walreplay", + Tags: []string{"status"}, + Responses: responsesWithErrorExamples("StatusWALReplayOutputBody", statusWALReplayResponseExamples(), errorResponseExamples(), "WAL replay status retrieved successfully.", "Error retrieving WAL replay status."), + }, + } +} + +func (*OpenAPIBuilder) adminDeleteSeriesPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("match[]", "Series selectors to identify series to delete.", true, base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + }), []example{{"example", []string{"{__name__=~\"test.*\"}"}}}), + queryParamWithExample("start", "Start timestamp for deletion.", false, timestampSchema(), timestampExamples(exampleTime.Add(-1*time.Hour))), + queryParamWithExample("end", "End timestamp for deletion.", false, timestampSchema(), timestampExamples(exampleTime)), + } + return &v3.PathItem{ + Post: &v3.Operation{ + OperationId: "deleteSeriesPost", + Summary: "Delete series matching selectors", + Description: "Deletes data for a selection of series in a time range.", + Tags: []string{"admin"}, + Parameters: params, + Responses: responsesWithErrorExamples("DeleteSeriesOutputBody", deleteSeriesResponseExamples(), errorResponseExamples(), "Series deleted successfully.", "Error deleting series."), + }, + Put: &v3.Operation{ + OperationId: "deleteSeriesPut", + Summary: "Delete series matching selectors via PUT", + Description: "Deletes data for a selection of series in a time range using PUT method.", + Tags: []string{"admin"}, + Parameters: params, + Responses: responsesWithErrorExamples("DeleteSeriesOutputBody", deleteSeriesResponseExamples(), errorResponseExamples(), "Series deleted successfully via PUT.", "Error deleting series via PUT."), + }, + } +} + +func (*OpenAPIBuilder) adminCleanTombstonesPath() *v3.PathItem { + return &v3.PathItem{ + Post: &v3.Operation{ + OperationId: "cleanTombstonesPost", + Summary: "Clean tombstones in the TSDB", + Description: "Removes deleted data from disk and cleans up existing tombstones.", + Tags: []string{"admin"}, + Responses: responsesWithErrorExamples("CleanTombstonesOutputBody", cleanTombstonesResponseExamples(), errorResponseExamples(), "Tombstones cleaned successfully.", "Error cleaning tombstones."), + }, + Put: &v3.Operation{ + OperationId: "cleanTombstonesPut", + Summary: "Clean tombstones in the TSDB via PUT", + Description: "Removes deleted data from disk and cleans up existing tombstones using PUT method.", + Tags: []string{"admin"}, + Responses: responsesWithErrorExamples("CleanTombstonesOutputBody", cleanTombstonesResponseExamples(), errorResponseExamples(), "Tombstones cleaned successfully via PUT.", "Error cleaning tombstones via PUT."), + }, + } +} + +func (*OpenAPIBuilder) adminSnapshotPath() *v3.PathItem { + params := []*v3.Parameter{ + queryParamWithExample("skip_head", "If true, do not snapshot data in the head block.", false, stringSchema(), []example{{"example", "false"}}), + } + return &v3.PathItem{ + Post: 
&v3.Operation{ + OperationId: "snapshotPost", + Summary: "Create a snapshot of the TSDB", + Description: "Creates a snapshot of all current data.", + Tags: []string{"admin"}, + Parameters: params, + Responses: responsesWithErrorExamples("SnapshotOutputBody", snapshotResponseExamples(), errorResponseExamples(), "Snapshot created successfully.", "Error creating snapshot."), + }, + Put: &v3.Operation{ + OperationId: "snapshotPut", + Summary: "Create a snapshot of the TSDB via PUT", + Description: "Creates a snapshot of all current data using PUT method.", + Tags: []string{"admin"}, + Parameters: params, + Responses: responsesWithErrorExamples("SnapshotOutputBody", snapshotResponseExamples(), errorResponseExamples(), "Snapshot created successfully via PUT.", "Error creating snapshot via PUT."), + }, + } +} + +func (*OpenAPIBuilder) remoteReadPath() *v3.PathItem { + return &v3.PathItem{ + Post: &v3.Operation{ + OperationId: "remoteRead", + Summary: "Remote read endpoint", + Description: "Prometheus remote read endpoint for federated queries. Accepts and returns Protocol Buffer encoded data.", + Tags: []string{"remote"}, + Responses: responsesNoContent(), + }, + } +} + +func (*OpenAPIBuilder) remoteWritePath() *v3.PathItem { + return &v3.PathItem{ + Post: &v3.Operation{ + OperationId: "remoteWrite", + Summary: "Remote write endpoint", + Description: "Prometheus remote write endpoint for sending metrics. Accepts Protocol Buffer encoded write requests.", + Tags: []string{"remote"}, + Responses: responsesNoContent(), + }, + } +} + +func (*OpenAPIBuilder) otlpWritePath() *v3.PathItem { + return &v3.PathItem{ + Post: &v3.Operation{ + OperationId: "otlpWrite", + Summary: "OTLP metrics write endpoint", + Description: "OpenTelemetry Protocol metrics ingestion endpoint. Accepts OTLP/HTTP metrics in Protocol Buffer format.", + Tags: []string{"otlp"}, + Responses: responsesNoContent(), + }, + } +} + +func (*OpenAPIBuilder) notificationsPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-notifications", + Summary: "Get notifications", + Tags: []string{"notifications"}, + Responses: responsesWithErrorExamples("NotificationsOutputBody", notificationsResponseExamples(), errorResponseExamples(), "Notifications retrieved successfully.", "Error retrieving notifications."), + }, + } +} + +// notificationsLivePath defines the /notifications/live endpoint. +// This endpoint uses OpenAPI 3.2's itemSchema feature for documenting SSE streams. +// It is excluded from the OpenAPI 3.1 specification. +func (*OpenAPIBuilder) notificationsLivePath() *v3.PathItem { + codes := orderedmap.New[string, *v3.Response]() + content := orderedmap.New[string, *v3.MediaType]() + + // Create a schema for the SSE message structure. + // Each SSE message has a 'data' field containing JSON. + sseItemProps := orderedmap.New[string, *base.SchemaProxy]() + sseItemProps.Set("data", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"string"}, + Description: "SSE data field containing JSON-encoded notification.", + ContentMediaType: "application/json", + ContentSchema: schemaRef("#/components/schemas/Notification"), + })) + + content.Set("text/event-stream", &v3.MediaType{ + // Use ItemSchema (OpenAPI 3.2) instead of Schema to describe each SSE message. + ItemSchema: base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Title: "Server Sent Event Message", + Description: "A single SSE message. 
The data field contains a JSON-encoded Notification object.", + Properties: sseItemProps, + Required: []string{"data"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + }), + Examples: notificationLiveExamples(), + }) + + codes.Set("200", &v3.Response{ + Description: "Server-sent events stream established.", + Content: content, + }) + codes.Set("default", errorResponse()) + + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "notifications-live", + Summary: "Stream live notifications via Server-Sent Events", + Description: "Subscribe to real-time server notifications using SSE. Each event contains a JSON-encoded Notification object in the data field.", + Tags: []string{"notifications"}, + Responses: &v3.Responses{Codes: codes}, + }, + } +} + +func (*OpenAPIBuilder) featuresPath() *v3.PathItem { + return &v3.PathItem{ + Get: &v3.Operation{ + OperationId: "get-features", + Summary: "Get features", + Tags: []string{"features"}, + Responses: responsesWithErrorExamples("FeaturesOutputBody", featuresResponseExamples(), errorResponseExamples(), "Feature flags retrieved successfully.", "Error retrieving features."), + }, + } +} diff --git a/web/api/v1/openapi_schemas.go b/web/api/v1/openapi_schemas.go new file mode 100644 index 0000000000..3a567983f4 --- /dev/null +++ b/web/api/v1/openapi_schemas.go @@ -0,0 +1,1223 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file defines all OpenAPI schema definitions for API request and response types. +// Schemas are organized by functional area: query, labels, series, metadata, targets, +// rules, alerts, and status endpoints. +package v1 + +import ( + "github.com/pb33f/libopenapi/datamodel/high/base" + v3 "github.com/pb33f/libopenapi/datamodel/high/v3" + "github.com/pb33f/libopenapi/orderedmap" +) + +// Schema definitions and components builder. + +func (b *OpenAPIBuilder) buildComponents() *v3.Components { + schemas := orderedmap.New[string, *base.SchemaProxy]() + + // Core schemas. + schemas.Set("Error", b.errorSchema()) + schemas.Set("Labels", b.labelsSchema()) + + // Query schemas. 
+ schemas.Set("QueryOutputBody", b.responseBodySchema("QueryData", "Response body for instant query.")) + schemas.Set("QueryRangeOutputBody", b.responseBodySchema("QueryData", "Response body for range query.")) + schemas.Set("QueryPostInputBody", b.queryPostInputBodySchema()) + schemas.Set("QueryRangePostInputBody", b.queryRangePostInputBodySchema()) + schemas.Set("QueryExemplarsOutputBody", b.simpleResponseBodySchema()) + schemas.Set("QueryExemplarsPostInputBody", b.queryExemplarsPostInputBodySchema()) + schemas.Set("FormatQueryOutputBody", b.formatQueryOutputBodySchema()) + schemas.Set("FormatQueryPostInputBody", b.formatQueryPostInputBodySchema()) + schemas.Set("ParseQueryOutputBody", b.simpleResponseBodySchema()) + schemas.Set("ParseQueryPostInputBody", b.parseQueryPostInputBodySchema()) + schemas.Set("QueryData", b.queryDataSchema()) + schemas.Set("FloatSample", b.floatSampleSchema()) + schemas.Set("HistogramSample", b.histogramSampleSchema()) + schemas.Set("FloatSeries", b.floatSeriesSchema()) + schemas.Set("HistogramSeries", b.histogramSeriesSchema()) + schemas.Set("HistogramValue", b.histogramValueSchema()) + + // Label schemas. + schemas.Set("LabelsOutputBody", b.stringArrayResponseBodySchema()) + schemas.Set("LabelsPostInputBody", b.labelsPostInputBodySchema()) + schemas.Set("LabelValuesOutputBody", b.stringArrayResponseBodySchema()) + + // Series schemas. + schemas.Set("SeriesOutputBody", b.labelsArrayResponseBodySchema()) + schemas.Set("SeriesPostInputBody", b.seriesPostInputBodySchema()) + schemas.Set("SeriesDeleteOutputBody", b.simpleResponseBodySchema()) + + // Metadata schemas. + schemas.Set("Metadata", b.metadataSchema()) + schemas.Set("MetadataOutputBody", b.metadataOutputBodySchema()) + schemas.Set("MetricMetadata", b.metricMetadataSchema()) + + // Target schemas. + schemas.Set("Target", b.targetSchema()) + schemas.Set("DroppedTarget", b.droppedTargetSchema()) + schemas.Set("TargetDiscovery", b.targetDiscoverySchema()) + schemas.Set("TargetsOutputBody", b.refResponseBodySchema("TargetDiscovery", "Response body for targets endpoint.")) + schemas.Set("TargetMetadataOutputBody", b.metricMetadataArrayResponseBodySchema()) + schemas.Set("ScrapePoolsDiscovery", b.scrapePoolsDiscoverySchema()) + schemas.Set("ScrapePoolsOutputBody", b.refResponseBodySchema("ScrapePoolsDiscovery", "Response body for scrape pools endpoint.")) + + // Relabel schemas. + schemas.Set("Config", b.configSchema()) + schemas.Set("RelabelStep", b.relabelStepSchema()) + schemas.Set("RelabelStepsResponse", b.relabelStepsResponseSchema()) + schemas.Set("TargetRelabelStepsOutputBody", b.refResponseBodySchema("RelabelStepsResponse", "Response body for target relabel steps endpoint.")) + + // Rule schemas. + schemas.Set("RuleGroup", b.ruleGroupSchema()) + schemas.Set("RuleDiscovery", b.ruleDiscoverySchema()) + schemas.Set("RulesOutputBody", b.refResponseBodySchema("RuleDiscovery", "Response body for rules endpoint.")) + + // Alert schemas. + schemas.Set("Alert", b.alertSchema()) + schemas.Set("AlertDiscovery", b.alertDiscoverySchema()) + schemas.Set("AlertsOutputBody", b.refResponseBodySchema("AlertDiscovery", "Response body for alerts endpoint.")) + schemas.Set("AlertmanagerTarget", b.alertmanagerTargetSchema()) + schemas.Set("AlertmanagerDiscovery", b.alertmanagerDiscoverySchema()) + schemas.Set("AlertmanagersOutputBody", b.refResponseBodySchema("AlertmanagerDiscovery", "Response body for alertmanagers endpoint.")) + + // Status schemas. 
+ schemas.Set("StatusConfigData", b.statusConfigDataSchema()) + schemas.Set("StatusConfigOutputBody", b.refResponseBodySchema("StatusConfigData", "Response body for status config endpoint.")) + schemas.Set("RuntimeInfo", b.runtimeInfoSchema()) + schemas.Set("StatusRuntimeInfoOutputBody", b.refResponseBodySchema("RuntimeInfo", "Response body for status runtime info endpoint.")) + schemas.Set("PrometheusVersion", b.prometheusVersionSchema()) + schemas.Set("StatusBuildInfoOutputBody", b.refResponseBodySchema("PrometheusVersion", "Response body for status build info endpoint.")) + schemas.Set("StatusFlagsOutputBody", b.statusFlagsOutputBodySchema()) + schemas.Set("HeadStats", b.headStatsSchema()) + schemas.Set("TSDBStat", b.tsdbStatSchema()) + schemas.Set("TSDBStatus", b.tsdbStatusSchema()) + schemas.Set("StatusTSDBOutputBody", b.refResponseBodySchema("TSDBStatus", "Response body for status TSDB endpoint.")) + schemas.Set("BlockDesc", b.blockDescSchema()) + schemas.Set("BlockStats", b.blockStatsSchema()) + schemas.Set("BlockMetaCompaction", b.blockMetaCompactionSchema()) + schemas.Set("BlockMeta", b.blockMetaSchema()) + schemas.Set("StatusTSDBBlocksData", b.statusTSDBBlocksDataSchema()) + schemas.Set("StatusTSDBBlocksOutputBody", b.refResponseBodySchema("StatusTSDBBlocksData", "Response body for status TSDB blocks endpoint.")) + schemas.Set("StatusWALReplayData", b.statusWALReplayDataSchema()) + schemas.Set("StatusWALReplayOutputBody", b.refResponseBodySchema("StatusWALReplayData", "Response body for status WAL replay endpoint.")) + + // Admin schemas. + schemas.Set("DeleteSeriesOutputBody", b.statusOnlyResponseBodySchema()) + schemas.Set("CleanTombstonesOutputBody", b.statusOnlyResponseBodySchema()) + schemas.Set("DataStruct", b.dataStructSchema()) + schemas.Set("SnapshotOutputBody", b.refResponseBodySchema("DataStruct", "Response body for snapshot endpoint.")) + + // Notification schemas. + schemas.Set("Notification", b.notificationSchema()) + schemas.Set("NotificationsOutputBody", b.notificationArrayResponseBodySchema()) + + // Features schema. + schemas.Set("FeaturesOutputBody", b.simpleResponseBodySchema()) + + return &v3.Components{Schemas: schemas} +} + +// Schema definitions using high-level structs. 
+ +func (*OpenAPIBuilder) errorSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("errorType", stringSchemaWithDescriptionAndExample("Type of error that occurred.", "bad_data")) + props.Set("error", stringSchemaWithDescriptionAndExample("Human-readable error message.", "invalid parameter")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Error response.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "errorType", "error"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) labelsSchema() *base.SchemaProxy { + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Label set represented as a key-value map.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: true}, + }) +} + +func (*OpenAPIBuilder) responseBodySchema(dataSchemaRef, description string) *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", schemaRef("#/components/schemas/"+dataSchemaRef)) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: description, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (b *OpenAPIBuilder) refResponseBodySchema(dataSchemaRef, description string) *base.SchemaProxy { + return b.responseBodySchema(dataSchemaRef, description) +} + +func (*OpenAPIBuilder) simpleResponseBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", base.CreateSchemaProxy(&base.Schema{ + Description: "Response data (structure varies by endpoint).", + Example: createYAMLNode(map[string]any{"result": "ok"}), + })) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Generic response body.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) statusOnlyResponseBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Response body containing only status.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) stringArrayResponseBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + Example: createYAMLNode([]string{"__name__", "job", "instance"}), + })) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Response body with an array of strings.", + 
AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) labelsArrayResponseBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/Labels")}, + Example: createYAMLNode([]map[string]string{{"__name__": "up", "job": "prometheus", "instance": "localhost:9090"}}), + })) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Response body with an array of label sets.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) metricMetadataArrayResponseBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/MetricMetadata")}, + Example: createYAMLNode([]map[string]any{ + { + "target": map[string]string{ + "instance": "localhost:9090", + "job": "prometheus", + }, + "metric": "up", + "type": "gauge", + "help": "The current health status of the target", + "unit": "", + }, + }), + })) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Response body with an array of metric metadata.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) notificationArrayResponseBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/Notification")}, + Example: createYAMLNode([]map[string]any{ + {"text": "Server is running", "date": "2023-07-21T20:00:00.000Z", "active": true}, + }), + })) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Response body with an array of notifications.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) floatSampleSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("metric", schemaRef("#/components/schemas/Labels")) + props.Set("value", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Timestamp and float value as [unixTimestamp, stringValue].", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + OneOf: []*base.SchemaProxy{ + base.CreateSchemaProxy(&base.Schema{Type: []string{"number"}}), + stringSchema(), + }, + })}, + MinItems: int64Ptr(2), + MaxItems: int64Ptr(2), + Example: createYAMLNode([]any{1767436620, "1"}), + 
})) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "A sample with a float value.", + Required: []string{"metric", "value"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) histogramValueSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("count", stringSchemaWithDescription("Total count of observations.")) + props.Set("sum", stringSchemaWithDescription("Sum of all observed values.")) + props.Set("buckets", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Histogram buckets as [boundary_rule, lower, upper, count].", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + OneOf: []*base.SchemaProxy{ + base.CreateSchemaProxy(&base.Schema{Type: []string{"number"}}), + stringSchema(), + }, + })}, + })}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Native histogram value representation.", + Required: []string{"count", "sum"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) histogramSampleSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("metric", schemaRef("#/components/schemas/Labels")) + props.Set("histogram", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Timestamp and histogram value as [unixTimestamp, histogramObject].", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + OneOf: []*base.SchemaProxy{ + base.CreateSchemaProxy(&base.Schema{Type: []string{"number"}}), + schemaRef("#/components/schemas/HistogramValue"), + }, + })}, + MinItems: int64Ptr(2), + MaxItems: int64Ptr(2), + Example: createYAMLNode([]any{1767436620, map[string]any{"count": "60", "sum": "120", "buckets": []any{}}}), + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "A sample with a native histogram value.", + Required: []string{"metric", "histogram"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) floatSeriesSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("metric", schemaRef("#/components/schemas/Labels")) + props.Set("values", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Array of [timestamp, stringValue] pairs for float values.", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + OneOf: []*base.SchemaProxy{ + base.CreateSchemaProxy(&base.Schema{Type: []string{"number"}}), + stringSchema(), + }, + })}, + MinItems: int64Ptr(2), + MaxItems: int64Ptr(2), + })}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "A time series with float values.", + Required: []string{"metric", "values"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) histogramSeriesSchema() *base.SchemaProxy { + props 
:= orderedmap.New[string, *base.SchemaProxy]() + props.Set("metric", schemaRef("#/components/schemas/Labels")) + props.Set("histograms", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Array of [timestamp, histogramObject] pairs for histogram values.", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + OneOf: []*base.SchemaProxy{ + base.CreateSchemaProxy(&base.Schema{Type: []string{"number"}}), + schemaRef("#/components/schemas/HistogramValue"), + }, + })}, + MinItems: int64Ptr(2), + MaxItems: int64Ptr(2), + })}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "A time series with native histogram values.", + Required: []string{"metric", "histograms"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) queryDataSchema() *base.SchemaProxy { + // Vector query result. + vectorProps := orderedmap.New[string, *base.SchemaProxy]() + vectorProps.Set("resultType", stringSchemaWithConstValue("vector")) + vectorProps.Set("result", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Array of samples (either float or histogram).", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + AnyOf: []*base.SchemaProxy{ + schemaRef("#/components/schemas/FloatSample"), + schemaRef("#/components/schemas/HistogramSample"), + }, + })}, + })) + + // Matrix query result. + matrixProps := orderedmap.New[string, *base.SchemaProxy]() + matrixProps.Set("resultType", stringSchemaWithConstValue("matrix")) + matrixProps.Set("result", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Array of time series (either float or histogram).", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + AnyOf: []*base.SchemaProxy{ + schemaRef("#/components/schemas/FloatSeries"), + schemaRef("#/components/schemas/HistogramSeries"), + }, + })}, + })) + + // Scalar query result. + scalarProps := orderedmap.New[string, *base.SchemaProxy]() + scalarProps.Set("resultType", stringSchemaWithConstValue("scalar")) + scalarProps.Set("result", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Scalar value as [timestamp, stringValue].", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{ + OneOf: []*base.SchemaProxy{ + base.CreateSchemaProxy(&base.Schema{Type: []string{"number"}}), + stringSchema(), + }, + })}, + MinItems: int64Ptr(2), + MaxItems: int64Ptr(2), + })) + + // String query result. + stringResultProps := orderedmap.New[string, *base.SchemaProxy]() + stringResultProps.Set("resultType", stringSchemaWithConstValue("string")) + stringResultProps.Set("result", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "String value as [timestamp, stringValue].", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + MinItems: int64Ptr(2), + MaxItems: int64Ptr(2), + })) + + return base.CreateSchemaProxy(&base.Schema{ + Description: "Query result data. The structure of 'result' depends on 'resultType'.", + AnyOf: []*base.SchemaProxy{ + // resultType: vector -> result: array of samples. 
+ base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Required: []string{"resultType", "result"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: vectorProps, + }), + // resultType: matrix -> result: array of series. + base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Required: []string{"resultType", "result"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: matrixProps, + }), + // resultType: scalar -> result: [timestamp, value]. + base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Required: []string{"resultType", "result"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: scalarProps, + }), + // resultType: string -> result: [timestamp, stringValue]. + base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Required: []string{"resultType", "result"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: stringResultProps, + }), + }, + Example: createYAMLNode(map[string]any{ + "resultType": "vector", + "result": []map[string]any{ + { + "metric": map[string]string{"__name__": "up", "job": "prometheus"}, + "value": []any{1627845600, "1"}, + }, + }, + }), + }) +} + +func (*OpenAPIBuilder) queryPostInputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("query", stringSchemaWithDescriptionAndExample("Form field: The PromQL query to execute.", "up")) + props.Set("time", stringSchemaWithDescriptionAndExample("Form field: The evaluation timestamp (optional, defaults to current time).", "2023-07-21T20:10:51.781Z")) + props.Set("limit", integerSchemaWithDescriptionAndExample("Form field: The maximum number of metrics to return.", 100)) + props.Set("timeout", stringSchemaWithDescriptionAndExample("Form field: Evaluation timeout (optional, defaults to and is capped by the value of the -query.timeout flag).", "30s")) + props.Set("lookback_delta", stringSchemaWithDescriptionAndExample("Form field: Override the lookback period for this query (optional).", "5m")) + props.Set("stats", stringSchemaWithDescriptionAndExample("Form field: When provided, include query statistics in the response (the special value 'all' enables more comprehensive statistics).", "all")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "POST request body for instant query.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"query"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) queryRangePostInputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("query", stringSchemaWithDescriptionAndExample("Form field: The query to execute.", "rate(http_requests_total[5m])")) + props.Set("start", stringSchemaWithDescriptionAndExample("Form field: The start time of the query.", "2023-07-21T20:10:30.781Z")) + props.Set("end", stringSchemaWithDescriptionAndExample("Form field: The end time of the query.", "2023-07-21T20:20:30.781Z")) + props.Set("step", stringSchemaWithDescriptionAndExample("Form field: The step size of the query.", "15s")) + props.Set("limit", integerSchemaWithDescriptionAndExample("Form field: The maximum number of metrics to return.", 100)) + props.Set("timeout", stringSchemaWithDescriptionAndExample("Form field: Evaluation timeout (optional, defaults to and is capped by 
the value of the -query.timeout flag).", "30s")) + props.Set("lookback_delta", stringSchemaWithDescriptionAndExample("Form field: Override the lookback period for this query (optional).", "5m")) + props.Set("stats", stringSchemaWithDescriptionAndExample("Form field: When provided, include query statistics in the response (the special value 'all' enables more comprehensive statistics).", "all")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "POST request body for range query.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"query", "start", "end", "step"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) queryExemplarsPostInputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("query", stringSchemaWithDescriptionAndExample("Form field: The query to execute.", "http_requests_total")) + props.Set("start", stringSchemaWithDescriptionAndExample("Form field: The start time of the query.", "2023-07-21T20:00:00.000Z")) + props.Set("end", stringSchemaWithDescriptionAndExample("Form field: The end time of the query.", "2023-07-21T21:00:00.000Z")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "POST request body for exemplars query.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"query"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) formatQueryOutputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", stringSchemaWithDescriptionAndExample("Formatted query string.", "sum by(status) (rate(http_requests_total[5m]))")) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Response body for format query endpoint.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) formatQueryPostInputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("query", stringSchemaWithDescriptionAndExample("Form field: The query to format.", "sum(rate(http_requests_total[5m])) by (status)")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "POST request body for format query.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"query"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) parseQueryPostInputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("query", stringSchemaWithDescriptionAndExample("Form field: The query to parse.", "sum(rate(http_requests_total[5m]))")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "POST request body for parse query.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"query"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) labelsPostInputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("start", stringSchemaWithDescriptionAndExample("Form field: The start time of the query.", "2023-07-21T20:00:00.000Z")) + props.Set("end", 
stringSchemaWithDescriptionAndExample("Form field: The end time of the query.", "2023-07-21T21:00:00.000Z")) + props.Set("match[]", stringArraySchemaWithDescriptionAndExample("Form field: Series selector argument that selects the series from which to read the label names.", []string{"{job=\"prometheus\"}"})) + props.Set("limit", integerSchemaWithDescriptionAndExample("Form field: The maximum number of label names to return.", 100)) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "POST request body for labels query.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) seriesPostInputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("start", stringSchemaWithDescriptionAndExample("Form field: The start time of the query.", "2023-07-21T20:00:00.000Z")) + props.Set("end", stringSchemaWithDescriptionAndExample("Form field: The end time of the query.", "2023-07-21T21:00:00.000Z")) + props.Set("match[]", stringArraySchemaWithDescriptionAndExample("Form field: Series selector argument that selects the series to return.", []string{"{job=\"prometheus\"}"})) + props.Set("limit", integerSchemaWithDescriptionAndExample("Form field: The maximum number of series to return.", 100)) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "POST request body for series query.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"match[]"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) metadataSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("type", stringSchemaWithDescription("Metric type (counter, gauge, histogram, summary, or untyped).")) + props.Set("unit", stringSchemaWithDescription("Unit of the metric.")) + props.Set("help", stringSchemaWithDescription("Help text describing the metric.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Metric metadata.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"type", "unit", "help"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) metadataOutputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{ + A: base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/Metadata")}, + }), + }, + })) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Response body for metadata endpoint.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) metricMetadataSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("target", schemaRef("#/components/schemas/Labels")) + props.Set("metric", stringSchemaWithDescription("Metric name.")) + props.Set("type", stringSchemaWithDescription("Metric type (counter, gauge, histogram, summary, or untyped).")) + 
props.Set("help", stringSchemaWithDescription("Help text describing the metric.")) + props.Set("unit", stringSchemaWithDescription("Unit of the metric.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Target metric metadata.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"target", "type", "help", "unit"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) targetSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("discoveredLabels", schemaRef("#/components/schemas/Labels")) + props.Set("labels", schemaRef("#/components/schemas/Labels")) + props.Set("scrapePool", stringSchemaWithDescription("Name of the scrape pool.")) + props.Set("scrapeUrl", stringSchemaWithDescription("URL of the target.")) + props.Set("globalUrl", stringSchemaWithDescription("Global URL of the target.")) + props.Set("lastError", stringSchemaWithDescription("Last error message from scraping.")) + props.Set("lastScrape", dateTimeSchemaWithDescription("Timestamp of the last scrape.")) + props.Set("lastScrapeDuration", numberSchemaWithDescription("Duration of the last scrape in seconds.")) + props.Set("health", stringSchemaWithDescription("Health status of the target (up, down, or unknown).")) + props.Set("scrapeInterval", stringSchemaWithDescription("Scrape interval for this target.")) + props.Set("scrapeTimeout", stringSchemaWithDescription("Scrape timeout for this target.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Scrape target information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"discoveredLabels", "labels", "scrapePool", "scrapeUrl", "globalUrl", "lastError", "lastScrape", "lastScrapeDuration", "health", "scrapeInterval", "scrapeTimeout"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) droppedTargetSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("discoveredLabels", schemaRef("#/components/schemas/Labels")) + props.Set("scrapePool", stringSchemaWithDescription("Name of the scrape pool.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Dropped target information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"discoveredLabels", "scrapePool"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) targetDiscoverySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("activeTargets", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/Target")}, + })) + props.Set("droppedTargets", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/DroppedTarget")}, + })) + props.Set("droppedTargetCounts", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{A: integerSchema()}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Target discovery information including active and dropped targets.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"activeTargets", "droppedTargets", 
"droppedTargetCounts"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) scrapePoolsDiscoverySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("scrapePools", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "List of all configured scrape pools.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"scrapePools"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) configSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("source_labels", stringArraySchemaWithDescription("Source labels for relabeling.")) + props.Set("separator", stringSchemaWithDescription("Separator for source label values.")) + props.Set("regex", stringSchemaWithDescription("Regular expression for matching.")) + props.Set("modulus", integerSchemaWithDescription("Modulus for hash-based relabeling.")) + props.Set("target_label", stringSchemaWithDescription("Target label name.")) + props.Set("replacement", stringSchemaWithDescription("Replacement value.")) + props.Set("action", stringSchemaWithDescription("Relabel action.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Relabel configuration.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) relabelStepSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("rule", schemaRef("#/components/schemas/Config")) + props.Set("output", schemaRef("#/components/schemas/Labels")) + props.Set("keep", base.CreateSchemaProxy(&base.Schema{Type: []string{"boolean"}})) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Relabel step showing the rule, output, and whether the target was kept.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"rule", "output", "keep"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) relabelStepsResponseSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("steps", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/RelabelStep")}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Relabeling steps response.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"steps"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) ruleGroupSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("name", stringSchemaWithDescription("Name of the rule group.")) + props.Set("file", stringSchemaWithDescription("File containing the rule group.")) + props.Set("rules", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Description: "Rules in this group.", + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: base.CreateSchemaProxy(&base.Schema{Type: []string{"object"}, Description: "Rule definition."})}, + })) + props.Set("interval", numberSchemaWithDescription("Evaluation interval in seconds.")) + props.Set("limit", integerSchemaWithDescription("Maximum number of alerts 
for this group.")) + props.Set("evaluationTime", numberSchemaWithDescription("Time taken to evaluate the group in seconds.")) + props.Set("lastEvaluation", dateTimeSchemaWithDescription("Timestamp of the last evaluation.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Rule group information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"name", "file", "rules", "interval", "limit", "evaluationTime", "lastEvaluation"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) ruleDiscoverySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("groups", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/RuleGroup")}, + })) + props.Set("groupNextToken", stringSchemaWithDescription("Pagination token for the next page of groups.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Rule discovery information containing all rule groups.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"groups"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) alertSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("labels", schemaRef("#/components/schemas/Labels")) + props.Set("annotations", schemaRef("#/components/schemas/Labels")) + props.Set("state", stringSchemaWithDescription("State of the alert (pending, firing, or inactive).")) + props.Set("value", stringSchemaWithDescription("Value of the alert expression.")) + props.Set("activeAt", dateTimeSchemaWithDescription("Timestamp when the alert became active.")) + props.Set("keepFiringSince", dateTimeSchemaWithDescription("Timestamp since the alert has been kept firing.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Alert information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"labels", "annotations", "state", "value"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) alertDiscoverySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("alerts", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/Alert")}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Alert discovery information containing all active alerts.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"alerts"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) alertmanagerTargetSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("url", stringSchemaWithDescription("URL of the Alertmanager instance.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Alertmanager target information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"url"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) alertmanagerDiscoverySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("activeAlertmanagers", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: 
&base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/AlertmanagerTarget")}, + })) + props.Set("droppedAlertmanagers", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/AlertmanagerTarget")}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Alertmanager discovery information including active and dropped instances.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"activeAlertmanagers", "droppedAlertmanagers"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) statusConfigDataSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("yaml", stringSchemaWithDescription("Prometheus configuration in YAML format.")) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Prometheus configuration.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"yaml"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) runtimeInfoSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("startTime", base.CreateSchemaProxy(&base.Schema{Type: []string{"string"}, Format: "date-time"})) + props.Set("CWD", stringSchema()) + props.Set("hostname", stringSchema()) + props.Set("serverTime", base.CreateSchemaProxy(&base.Schema{Type: []string{"string"}, Format: "date-time"})) + props.Set("reloadConfigSuccess", base.CreateSchemaProxy(&base.Schema{Type: []string{"boolean"}})) + props.Set("lastConfigTime", base.CreateSchemaProxy(&base.Schema{Type: []string{"string"}, Format: "date-time"})) + props.Set("corruptionCount", integerSchema()) + props.Set("goroutineCount", integerSchema()) + props.Set("GOMAXPROCS", integerSchema()) + props.Set("GOMEMLIMIT", integerSchema()) + props.Set("GOGC", stringSchema()) + props.Set("GODEBUG", stringSchema()) + props.Set("storageRetention", stringSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Prometheus runtime information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"startTime", "CWD", "hostname", "serverTime", "reloadConfigSuccess", "lastConfigTime", "corruptionCount", "goroutineCount", "GOMAXPROCS", "GOMEMLIMIT", "GOGC", "GODEBUG", "storageRetention"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) prometheusVersionSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("version", stringSchema()) + props.Set("revision", stringSchema()) + props.Set("branch", stringSchema()) + props.Set("buildUser", stringSchema()) + props.Set("buildDate", stringSchema()) + props.Set("goVersion", stringSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Prometheus version information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"version", "revision", "branch", "buildUser", "buildDate", "goVersion"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) statusFlagsOutputBodySchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("status", statusSchema()) + props.Set("data", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + AdditionalProperties: 
&base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + })) + props.Set("warnings", warningsSchema()) + props.Set("infos", infosSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Response body for status flags endpoint.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"status", "data"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) headStatsSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("numSeries", integerSchema()) + props.Set("numLabelPairs", integerSchema()) + props.Set("chunkCount", integerSchema()) + props.Set("minTime", integerSchema()) + props.Set("maxTime", integerSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "TSDB head statistics.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"numSeries", "numLabelPairs", "chunkCount", "minTime", "maxTime"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) tsdbStatSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("name", stringSchema()) + props.Set("value", integerSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "TSDB statistic.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"name", "value"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) tsdbStatusSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("headStats", schemaRef("#/components/schemas/HeadStats")) + props.Set("seriesCountByMetricName", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/TSDBStat")}, + })) + props.Set("labelValueCountByLabelName", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/TSDBStat")}, + })) + props.Set("memoryInBytesByLabelName", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/TSDBStat")}, + })) + props.Set("seriesCountByLabelValuePair", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/TSDBStat")}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "TSDB status information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"headStats", "seriesCountByMetricName", "labelValueCountByLabelName", "memoryInBytesByLabelName", "seriesCountByLabelValuePair"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) blockDescSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("ulid", stringSchema()) + props.Set("minTime", integerSchema()) + props.Set("maxTime", integerSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Block descriptor.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"ulid", "minTime", "maxTime"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) blockStatsSchema() *base.SchemaProxy { + props := 
orderedmap.New[string, *base.SchemaProxy]() + props.Set("numSamples", integerSchema()) + props.Set("numSeries", integerSchema()) + props.Set("numChunks", integerSchema()) + props.Set("numTombstones", integerSchema()) + props.Set("numFloatSamples", integerSchema()) + props.Set("numHistogramSamples", integerSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Block statistics.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) blockMetaCompactionSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("level", integerSchema()) + props.Set("sources", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + })) + props.Set("parents", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/BlockDesc")}, + })) + props.Set("failed", base.CreateSchemaProxy(&base.Schema{Type: []string{"boolean"}})) + props.Set("deletable", base.CreateSchemaProxy(&base.Schema{Type: []string{"boolean"}})) + props.Set("hints", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: stringSchema()}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Block compaction metadata.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"level"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) blockMetaSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("ulid", stringSchema()) + props.Set("minTime", integerSchema()) + props.Set("maxTime", integerSchema()) + props.Set("stats", schemaRef("#/components/schemas/BlockStats")) + props.Set("compaction", schemaRef("#/components/schemas/BlockMetaCompaction")) + props.Set("version", integerSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Block metadata.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"ulid", "minTime", "maxTime", "compaction", "version"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) statusTSDBBlocksDataSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("blocks", base.CreateSchemaProxy(&base.Schema{ + Type: []string{"array"}, + Items: &base.DynamicValue[*base.SchemaProxy, bool]{A: schemaRef("#/components/schemas/BlockMeta")}, + })) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "TSDB blocks information.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"blocks"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) statusWALReplayDataSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("min", integerSchema()) + props.Set("max", integerSchema()) + props.Set("current", integerSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "WAL replay status.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"min", "max", "current"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) dataStructSchema() 
*base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("name", stringSchema()) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Generic data structure with a name field.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"name"}, + Properties: props, + }) +} + +func (*OpenAPIBuilder) notificationSchema() *base.SchemaProxy { + props := orderedmap.New[string, *base.SchemaProxy]() + props.Set("text", stringSchema()) + props.Set("date", base.CreateSchemaProxy(&base.Schema{Type: []string{"string"}, Format: "date-time"})) + props.Set("active", base.CreateSchemaProxy(&base.Schema{Type: []string{"boolean"}})) + + return base.CreateSchemaProxy(&base.Schema{ + Type: []string{"object"}, + Description: "Server notification.", + AdditionalProperties: &base.DynamicValue[*base.SchemaProxy, bool]{N: 1, B: false}, + Required: []string{"text", "date", "active"}, + Properties: props, + }) +} diff --git a/web/api/v1/openapi_test.go b/web/api/v1/openapi_test.go new file mode 100644 index 0000000000..0d2f5cc83e --- /dev/null +++ b/web/api/v1/openapi_test.go @@ -0,0 +1,289 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1 + +import ( + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/prometheus/common/promslog" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v2" +) + +// TestOpenAPIHTTPHandler verifies that the OpenAPI endpoint serves a valid specification +// with correct headers, structure conforming to OpenAPI 3.1 standards, and consistent responses. +func TestOpenAPIHTTPHandler(t *testing.T) { + builder := NewOpenAPIBuilder(OpenAPIOptions{}, promslog.NewNopLogger()) + + // First request. + req1 := httptest.NewRequest(http.MethodGet, "/api/v1/openapi.yaml", nil) + rec1 := httptest.NewRecorder() + builder.ServeOpenAPI(rec1, req1) + + // Verify status code and headers. + require.Equal(t, http.StatusOK, rec1.Code) + require.True(t, strings.HasPrefix(rec1.Header().Get("Content-Type"), "application/yaml"), "Content-Type should start with application/yaml") + require.Equal(t, "no-cache, no-store, must-revalidate", rec1.Header().Get("Cache-Control")) + + // Verify it is valid YAML. + var spec map[string]any + err := yaml.Unmarshal(rec1.Body.Bytes(), &spec) + require.NoError(t, err) + + // Verify structure. + require.Contains(t, spec, "openapi") + require.Contains(t, spec, "info") + require.Contains(t, spec, "paths") + require.Contains(t, spec, "components") + + // Verify OpenAPI version (default is 3.1.0). + require.Equal(t, "3.1.0", spec["openapi"]) + + // Verify info section. + info, ok := spec["info"].(map[any]any) + require.True(t, ok, "info should be a map") + require.Equal(t, "Prometheus API", info["title"]) + + // Verify paths exist. 
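+	// As with the info section above, gopkg.in/yaml.v2 decodes nested mappings with interface{} keys, hence the map[any]any assertions.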
+ paths, ok := spec["paths"].(map[any]any) + require.True(t, ok, "paths should be a map") + require.NotEmpty(t, paths, "paths should not be empty") + + // Second request to verify response consistency. + req2 := httptest.NewRequest(http.MethodGet, "/api/v1/openapi.yaml", nil) + rec2 := httptest.NewRecorder() + builder.ServeOpenAPI(rec2, req2) + + // Both responses should be identical. + require.Equal(t, rec1.Body.String(), rec2.Body.String()) +} + +// TestOpenAPIPathFiltering verifies that the IncludePaths option correctly filters +// which API paths are included in the generated specification. +func TestOpenAPIPathFiltering(t *testing.T) { + tests := []struct { + name string + includePaths []string + wantPaths []string + excludePaths []string + }{ + { + name: "no filter includes all", + includePaths: nil, + wantPaths: []string{"/query", "/labels", "/alerts", "/targets"}, + }, + { + name: "filter query paths", + includePaths: []string{"/query"}, + wantPaths: []string{"/query", "/query_range", "/query_exemplars"}, + excludePaths: []string{"/labels", "/alerts", "/targets"}, + }, + { + name: "filter status paths", + includePaths: []string{"/status"}, + wantPaths: []string{"/status/config", "/status/flags", "/status/runtimeinfo"}, + excludePaths: []string{"/query", "/alerts", "/targets"}, + }, + { + name: "filter multiple prefixes", + includePaths: []string{"/label", "/series"}, + wantPaths: []string{"/labels", "/label/{name}/values", "/series"}, + excludePaths: []string{"/query", "/alerts", "/targets"}, + }, + { + name: "exact path match", + includePaths: []string{"/alerts"}, + wantPaths: []string{"/alerts"}, + excludePaths: []string{"/alertmanagers", "/query"}, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + builder := NewOpenAPIBuilder(OpenAPIOptions{ + IncludePaths: tc.includePaths, + }, promslog.NewNopLogger()) + + req := httptest.NewRequest(http.MethodGet, "/api/v1/openapi.yaml", nil) + rec := httptest.NewRecorder() + builder.ServeOpenAPI(rec, req) + + require.Equal(t, http.StatusOK, rec.Code) + + var spec map[string]any + err := yaml.Unmarshal(rec.Body.Bytes(), &spec) + require.NoError(t, err) + + paths, ok := spec["paths"].(map[any]any) + require.True(t, ok, "paths should be a map") + + for _, want := range tc.wantPaths { + require.Contains(t, paths, want) + } + + for _, exclude := range tc.excludePaths { + require.NotContains(t, paths, exclude) + } + }) + } +} + +// TestOpenAPISchemaCompleteness verifies that all referenced schemas in paths +// are defined in the components/schemas section of the specification. +func TestOpenAPISchemaCompleteness(t *testing.T) { + builder := NewOpenAPIBuilder(OpenAPIOptions{}, promslog.NewNopLogger()) + + req := httptest.NewRequest(http.MethodGet, "/api/v1/openapi.yaml", nil) + rec := httptest.NewRecorder() + builder.ServeOpenAPI(rec, req) + + var spec map[string]any + err := yaml.Unmarshal(rec.Body.Bytes(), &spec) + require.NoError(t, err) + + components, ok := spec["components"].(map[any]any) + require.True(t, ok, "components should be a map") + + schemas, ok := components["schemas"].(map[any]any) + require.True(t, ok, "schemas should be a map") + + // Verify essential schemas are present. 
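+	// Spot-check a representative subset here; the golden-file test snapshots the full specification.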
+ essentialSchemas := []string{ + "Error", + "Labels", + "QueryOutputBody", + "LabelsOutputBody", + "SeriesOutputBody", + "TargetsOutputBody", + "AlertsOutputBody", + "RulesOutputBody", + "StatusConfigOutputBody", + "StatusFlagsOutputBody", + "PrometheusVersion", + } + + for _, schema := range essentialSchemas { + require.Contains(t, schemas, schema) + } +} + +// TODO: Add test to verify all routes from api.go Register() are covered in OpenAPI spec. +// Consider wrapping Router to track registered paths and cross-check with OpenAPI paths. + +// TestOpenAPIShouldIncludePath verifies the shouldIncludePath method correctly +// matches paths against the IncludePaths filter configuration. +func TestOpenAPIShouldIncludePath(t *testing.T) { + tests := []struct { + name string + includePaths []string + path string + expected bool + }{ + { + name: "empty filter includes all", + includePaths: nil, + path: "/query", + expected: true, + }, + { + name: "exact match", + includePaths: []string{"/query"}, + path: "/query", + expected: true, + }, + { + name: "prefix match", + includePaths: []string{"/query"}, + path: "/query_range", + expected: true, + }, + { + name: "no match", + includePaths: []string{"/query"}, + path: "/labels", + expected: false, + }, + { + name: "multiple filters with match", + includePaths: []string{"/labels", "/series"}, + path: "/series", + expected: true, + }, + { + name: "multiple filters without match", + includePaths: []string{"/labels", "/series"}, + path: "/query", + expected: false, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + builder := &OpenAPIBuilder{ + options: OpenAPIOptions{ + IncludePaths: tc.includePaths, + }, + } + + result := builder.shouldIncludePath(tc.path) + require.Equal(t, tc.expected, result) + }) + } +} + +// TestOpenAPIVersionConsistency verifies that both OpenAPI versions are properly generated +// and that 3.2 has exactly one more path than 3.1 (/notifications/live). +func TestOpenAPIVersionConsistency(t *testing.T) { + builder := NewOpenAPIBuilder(OpenAPIOptions{}, promslog.NewNopLogger()) + + // Fetch OpenAPI 3.1 spec (default). + req31 := httptest.NewRequest(http.MethodGet, "/api/v1/openapi.yaml", nil) + rec31 := httptest.NewRecorder() + builder.ServeOpenAPI(rec31, req31) + + require.Equal(t, http.StatusOK, rec31.Code) + + // Fetch OpenAPI 3.2 spec. + req32 := httptest.NewRequest(http.MethodGet, "/api/v1/openapi.yaml?openapi_version=3.2", nil) + rec32 := httptest.NewRecorder() + builder.ServeOpenAPI(rec32, req32) + + require.Equal(t, http.StatusOK, rec32.Code) + + // Parse both specs. + var spec31, spec32 map[string]any + err := yaml.Unmarshal(rec31.Body.Bytes(), &spec31) + require.NoError(t, err) + err = yaml.Unmarshal(rec32.Body.Bytes(), &spec32) + require.NoError(t, err) + + // Verify versions are different. + require.Equal(t, "3.1.0", spec31["openapi"]) + require.Equal(t, "3.2.0", spec32["openapi"]) + + // Verify /notifications/live is only in 3.2. + paths31 := spec31["paths"].(map[any]any) + paths32 := spec32["paths"].(map[any]any) + + require.NotContains(t, paths31, "/notifications/live") + + require.Contains(t, paths32, "/notifications/live") + + // Verify 3.2 has exactly one more path than 3.1. 
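+	// Comparing counts rather than exact path sets keeps this assertion valid as endpoints are added to both versions.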
+ require.Len(t, paths32, len(paths31)+1, + "OpenAPI 3.2 should have exactly one more path than 3.1") +} diff --git a/web/api/v1/test_helpers.go b/web/api/v1/test_helpers.go new file mode 100644 index 0000000000..2662b0c84b --- /dev/null +++ b/web/api/v1/test_helpers.go @@ -0,0 +1,157 @@ +// Copyright The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1 + +import ( + "context" + "testing" + "time" + + "github.com/prometheus/common/route" + + "github.com/prometheus/prometheus/web/api/testhelpers" +) + +// newTestAPI creates a new API instance for testing using testhelpers. +func newTestAPI(t *testing.T, cfg testhelpers.APIConfig) *testhelpers.APIWrapper { + t.Helper() + + params := testhelpers.PrepareAPI(t, cfg) + + // Adapt the testhelpers interfaces to v1 interfaces. + api := NewAPI( + params.QueryEngine, + params.Queryable, + nil, // appendable + params.ExemplarQueryable, + func(ctx context.Context) ScrapePoolsRetriever { + return adaptScrapePoolsRetriever(params.ScrapePoolsRetriever(ctx)) + }, + func(ctx context.Context) TargetRetriever { + return adaptTargetRetriever(params.TargetRetriever(ctx)) + }, + func(ctx context.Context) AlertmanagerRetriever { + return adaptAlertmanagerRetriever(params.AlertmanagerRetriever(ctx)) + }, + params.ConfigFunc, + params.FlagsMap, + GlobalURLOptions{}, + params.ReadyFunc, + adaptTSDBAdminStats(params.TSDBAdmin), + params.DBDir, + false, // enableAdmin + params.Logger, + func(ctx context.Context) RulesRetriever { + return adaptRulesRetriever(params.RulesRetriever(ctx)) + }, + 0, // remoteReadSampleLimit + 0, // remoteReadConcurrencyLimit + 0, // remoteReadMaxBytesInFrame + false, // isAgent + nil, // corsOrigin + func() (RuntimeInfo, error) { + info, err := params.RuntimeInfoFunc() + return RuntimeInfo{ + StartTime: info.StartTime, + CWD: info.CWD, + Hostname: info.Hostname, + ServerTime: info.ServerTime, + ReloadConfigSuccess: info.ReloadConfigSuccess, + LastConfigTime: info.LastConfigTime, + CorruptionCount: info.CorruptionCount, + GoroutineCount: info.GoroutineCount, + GOMAXPROCS: info.GOMAXPROCS, + GOMEMLIMIT: info.GOMEMLIMIT, + GOGC: info.GOGC, + GODEBUG: info.GODEBUG, + StorageRetention: info.StorageRetention, + }, err + }, + &PrometheusVersion{ + Version: params.BuildInfo.Version, + Revision: params.BuildInfo.Revision, + Branch: params.BuildInfo.Branch, + BuildUser: params.BuildInfo.BuildUser, + BuildDate: params.BuildInfo.BuildDate, + GoVersion: params.BuildInfo.GoVersion, + }, + params.NotificationsGetter, + params.NotificationsSub, + params.Gatherer, + params.Registerer, + nil, // statsRenderer + false, // rwEnabled + nil, // acceptRemoteWriteProtoMsgs + false, // otlpEnabled + false, // otlpDeltaToCumulative + false, // otlpNativeDeltaIngestion + false, // stZeroIngestionEnabled + 5*time.Minute, // lookbackDelta + false, // enableTypeAndUnitLabels + false, // appendMetadata + nil, // overrideErrorCode + nil, // featureRegistry + OpenAPIOptions{}, // openAPIOptions + ) + + // Register routes. 
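+	// Mount under /api/v1 so test requests use the same paths as a running Prometheus server.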
+ router := route.New() + api.Register(router.WithPrefix("/api/v1")) + + return &testhelpers.APIWrapper{ + Handler: router, + } +} + +// Adapter functions to convert testhelpers interfaces to v1 interfaces. + +type rulesRetrieverAdapter struct { + testhelpers.RulesRetriever +} + +func adaptRulesRetriever(r testhelpers.RulesRetriever) RulesRetriever { + return &rulesRetrieverAdapter{r} +} + +type targetRetrieverAdapter struct { + testhelpers.TargetRetriever +} + +func adaptTargetRetriever(t testhelpers.TargetRetriever) TargetRetriever { + return &targetRetrieverAdapter{t} +} + +type scrapePoolsRetrieverAdapter struct { + testhelpers.ScrapePoolsRetriever +} + +func adaptScrapePoolsRetriever(s testhelpers.ScrapePoolsRetriever) ScrapePoolsRetriever { + return &scrapePoolsRetrieverAdapter{s} +} + +type alertmanagerRetrieverAdapter struct { + testhelpers.AlertmanagerRetriever +} + +func adaptAlertmanagerRetriever(a testhelpers.AlertmanagerRetriever) AlertmanagerRetriever { + return &alertmanagerRetrieverAdapter{a} +} + +type tsdbAdminStatsAdapter struct { + testhelpers.TSDBAdminStats +} + +func adaptTSDBAdminStats(t testhelpers.TSDBAdminStats) TSDBAdminStats { + return &tsdbAdminStatsAdapter{t} +} diff --git a/web/api/v1/testdata/openapi_3.1_golden.yaml b/web/api/v1/testdata/openapi_3.1_golden.yaml new file mode 100644 index 0000000000..c69694b530 --- /dev/null +++ b/web/api/v1/testdata/openapi_3.1_golden.yaml @@ -0,0 +1,4401 @@ +openapi: 3.1.0 +info: + title: Prometheus API + description: Prometheus is an Open-Source monitoring system with a dimensional data model, flexible query language, efficient time series database and modern alerting approach. + contact: + name: Prometheus Community + url: https://prometheus.io/community/ + version: 0.0.1-undefined +servers: + - url: /api/v1 +paths: + /query: + get: + tags: + - query + summary: Evaluate an instant query + operationId: query + parameters: + - name: limit + in: query + description: The maximum number of metrics to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + - name: time + in: query + description: The evaluation timestamp (optional, defaults to current time). + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: query + in: query + description: The PromQL query to execute. + required: true + explode: false + schema: + type: string + examples: + example: + value: up + - name: timeout + in: query + description: Evaluation timeout. Optional. Defaults to and is capped by the value of the -query.timeout flag. + required: false + explode: false + schema: + type: string + examples: + example: + value: 30s + - name: lookback_delta + in: query + description: Override the lookback period for this query. Optional. + required: false + explode: false + schema: + type: string + examples: + example: + value: 5m + - name: stats + in: query + description: When provided, include query statistics in the response. The special value 'all' enables more comprehensive statistics. + required: false + explode: false + schema: + type: string + examples: + example: + value: all + responses: + "200": + description: Query executed successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/QueryOutputBody' + examples: + vectorResult: + summary: 'Instant vector query: up' + value: {"status": "success", "data": {"resultType": "vector", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "value": [1767436620, "1"]}, {"metric": {"__name__": "up", "env": "demo", "instance": "demo.prometheus.io:9093", "job": "alertmanager"}, "value": [1767436620, "1"]}]}} + scalarResult: + summary: 'Scalar query: scalar(42)' + value: + data: + result: + - 1767436620 + - "42" + resultType: scalar + status: success + matrixResult: + summary: 'Range vector query: up[5m]' + value: {"status": "success", "data": {"resultType": "matrix", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "values": [[1767436320, "1"], [1767436620, "1"]]}]}} + default: + description: Error executing query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Evaluate an instant query + operationId: query-post + requestBody: + description: Submit an instant query. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/QueryPostInputBody' + examples: + simpleQuery: + summary: Simple instant query + value: + query: up + queryWithTime: + summary: Query with specific timestamp + value: + query: up{job="prometheus"} + time: "2026-01-02T13:37:00.000Z" + queryWithLimit: + summary: Query with limit and statistics + value: + limit: 100 + query: rate(prometheus_http_requests_total{handler="/api/v1/query"}[5m]) + stats: all + required: true + responses: + "200": + description: Instant query executed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryOutputBody' + examples: + vectorResult: + summary: 'Instant vector query: up' + value: {"status": "success", "data": {"resultType": "vector", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "value": [1767436620, "1"]}, {"metric": {"__name__": "up", "env": "demo", "instance": "demo.prometheus.io:9093", "job": "alertmanager"}, "value": [1767436620, "1"]}]}} + scalarResult: + summary: 'Scalar query: scalar(42)' + value: + data: + result: + - 1767436620 + - "42" + resultType: scalar + status: success + matrixResult: + summary: 'Range vector query: up[5m]' + value: {"status": "success", "data": {"resultType": "matrix", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "values": [[1767436320, "1"], [1767436620, "1"]]}]}} + default: + description: Error executing instant query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /query_range: + get: + tags: + - query + summary: Evaluate a range query + operationId: query-range + parameters: + - name: limit + in: query + description: The maximum number of metrics to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + - name: start + in: query + description: The start time of the query. 
+ required: true + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: The end time of the query. + required: true + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: step + in: query + description: The step size of the query. + required: true + explode: false + schema: + type: string + examples: + example: + value: 15s + - name: query + in: query + description: The query to execute. + required: true + explode: false + schema: + type: string + examples: + example: + value: rate(prometheus_http_requests_total{handler="/api/v1/query"}[5m]) + - name: timeout + in: query + description: Evaluation timeout. Optional. Defaults to and is capped by the value of the -query.timeout flag. + required: false + explode: false + schema: + type: string + examples: + example: + value: 30s + - name: lookback_delta + in: query + description: Override the lookback period for this query. Optional. + required: false + explode: false + schema: + type: string + examples: + example: + value: 5m + - name: stats + in: query + description: When provided, include query statistics in the response. The special value 'all' enables more comprehensive statistics. + required: false + explode: false + schema: + type: string + examples: + example: + value: all + responses: + "200": + description: Range query executed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRangeOutputBody' + examples: + matrixResult: + summary: 'Range query: rate(prometheus_http_requests_total[5m])' + value: {"status": "success", "data": {"resultType": "matrix", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "values": [[1767433020, "1"], [1767434820, "1"], [1767436620, "1"]]}]}} + default: + description: Error executing range query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Evaluate a range query + operationId: query-range-post + requestBody: + description: Submit a range query. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/QueryRangePostInputBody' + examples: + basicRange: + summary: Basic range query + value: + end: "2026-01-02T13:37:00.000Z" + query: up + start: "2026-01-02T12:37:00.000Z" + step: 15s + rateQuery: + summary: Rate calculation over time range + value: + end: "2026-01-02T13:37:00.000Z" + query: rate(prometheus_http_requests_total{handler="/api/v1/query"}[5m]) + start: "2026-01-02T12:37:00.000Z" + step: 30s + timeout: 30s + required: true + responses: + "200": + description: Range query executed successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/QueryRangeOutputBody' + examples: + matrixResult: + summary: 'Range query: rate(prometheus_http_requests_total[5m])' + value: {"status": "success", "data": {"resultType": "matrix", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "values": [[1767433020, "1"], [1767434820, "1"], [1767436620, "1"]]}]}} + default: + description: Error executing range query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /query_exemplars: + get: + tags: + - query + summary: Query exemplars + operationId: query-exemplars + parameters: + - name: start + in: query + description: Start timestamp for exemplars query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for exemplars query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: query + in: query + description: PromQL query to extract exemplars for. + required: true + explode: false + schema: + type: string + examples: + example: + value: prometheus_http_requests_total + responses: + "200": + description: Exemplars retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryExemplarsOutputBody' + examples: + exemplarsResult: + summary: Exemplars for a metric with trace IDs + value: + data: + - exemplars: + - labels: + traceID: abc123def456 + timestamp: 1.689956451781e+09 + value: "1.5" + seriesLabels: + __name__: http_requests_total + job: api-server + method: GET + status: success + default: + description: Error retrieving exemplars. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Query exemplars + operationId: query-exemplars-post + requestBody: + description: Submit an exemplars query. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/QueryExemplarsPostInputBody' + examples: + basicExemplar: + summary: Query exemplars for a metric + value: + query: prometheus_http_requests_total + exemplarWithTimeRange: + summary: Exemplars within specific time range + value: + end: "2026-01-02T13:37:00.000Z" + query: prometheus_http_requests_total{job="prometheus"} + start: "2026-01-02T12:37:00.000Z" + required: true + responses: + "200": + description: Exemplars query completed successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/QueryExemplarsOutputBody' + examples: + exemplarsResult: + summary: Exemplars for a metric with trace IDs + value: + data: + - exemplars: + - labels: + traceID: abc123def456 + timestamp: 1.689956451781e+09 + value: "1.5" + seriesLabels: + __name__: http_requests_total + job: api-server + method: GET + status: success + default: + description: Error processing exemplars query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /format_query: + get: + tags: + - query + summary: Format a PromQL query + operationId: format-query + parameters: + - name: query + in: query + description: PromQL expression to format. + required: true + explode: false + schema: + type: string + examples: + example: + value: sum(rate(http_requests_total[5m])) by (job) + responses: + "200": + description: Query formatted successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/FormatQueryOutputBody' + examples: + formattedQuery: + summary: Formatted PromQL query + value: + data: sum by(job, status) (rate(http_requests_total[5m])) + status: success + default: + description: Error formatting query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Format a PromQL query + operationId: format-query-post + requestBody: + description: Submit a PromQL query to format. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/FormatQueryPostInputBody' + examples: + simpleFormat: + summary: Format a simple query + value: + query: up{job="prometheus"} + complexFormat: + summary: Format a complex query + value: + query: sum(rate(http_requests_total[5m])) by (job, status) + required: true + responses: + "200": + description: Query formatting completed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/FormatQueryOutputBody' + examples: + formattedQuery: + summary: Formatted PromQL query + value: + data: sum by(job, status) (rate(http_requests_total[5m])) + status: success + default: + description: Error formatting query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /parse_query: + get: + tags: + - query + summary: Parse a PromQL query + operationId: parse-query + parameters: + - name: query + in: query + description: PromQL expression to parse. + required: true + explode: false + schema: + type: string + examples: + example: + value: up{job="prometheus"} + responses: + "200": + description: Query parsed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/ParseQueryOutputBody' + examples: + parsedQuery: + summary: Parsed PromQL expression tree + value: + data: + resultType: vector + status: success + default: + description: Error parsing query. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Parse a PromQL query + operationId: parse-query-post + requestBody: + description: Submit a PromQL query to parse. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/ParseQueryPostInputBody' + examples: + simpleParse: + summary: Parse a simple query + value: + query: up + complexParse: + summary: Parse a complex query + value: + query: rate(http_requests_total{job="api"}[5m]) + required: true + responses: + "200": + description: Query parsed successfully via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/ParseQueryOutputBody' + examples: + parsedQuery: + summary: Parsed PromQL expression tree + value: + data: + resultType: vector + status: success + default: + description: Error parsing query via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /labels: + get: + tags: + - labels + summary: Get label names + operationId: labels + parameters: + - name: start + in: query + description: Start timestamp for label names query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for label names query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: match[] + in: query + description: Series selector argument. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{job="prometheus"}' + - name: limit + in: query + description: Maximum number of label names to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + responses: + "200": + description: Label names retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsOutputBody' + examples: + labelNames: + summary: List of label names + value: + data: + - __name__ + - active + - address + - alertmanager + - alertname + - alertstate + - backend + - branch + - code + - collector + - component + - device + - env + - endpoint + - fstype + - handler + - instance + - job + - le + - method + - mode + - name + status: success + default: + description: Error retrieving label names. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - labels + summary: Get label names + operationId: labels-post + requestBody: + description: Submit a label names query. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/LabelsPostInputBody' + examples: + allLabels: + summary: Get all label names + value: {} + labelsWithTimeRange: + summary: Get label names within time range + value: + end: "2026-01-02T13:37:00.000Z" + start: "2026-01-02T12:37:00.000Z" + labelsWithMatch: + summary: Get label names matching series selector + value: + match[]: + - up + - process_start_time_seconds{job="prometheus"} + required: true + responses: + "200": + description: Label names retrieved successfully via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsOutputBody' + examples: + labelNames: + summary: List of label names + value: + data: + - __name__ + - active + - address + - alertmanager + - alertname + - alertstate + - backend + - branch + - code + - collector + - component + - device + - env + - endpoint + - fstype + - handler + - instance + - job + - le + - method + - mode + - name + status: success + default: + description: Error retrieving label names via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /label/{name}/values: + get: + tags: + - labels + summary: Get label values + operationId: label-values + parameters: + - name: name + in: path + description: Label name. + required: true + schema: + type: string + - name: start + in: query + description: Start timestamp for label values query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for label values query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: match[] + in: query + description: Series selector argument. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{job="prometheus"}' + - name: limit + in: query + description: Maximum number of label values to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 1000 + responses: + "200": + description: Label values retrieved successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/LabelValuesOutputBody' + examples: + labelValues: + summary: List of values for a label + value: + data: + - alertmanager + - blackbox + - caddy + - cadvisor + - grafana + - node + - prometheus + - random + status: success + default: + description: Error retrieving label values. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /series: + get: + tags: + - series + summary: Find series by label matchers + operationId: series + parameters: + - name: start + in: query + description: Start timestamp for series query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for series query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: match[] + in: query + description: Series selector argument. + required: true + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{job="prometheus"}' + - name: limit + in: query + description: Maximum number of series to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + responses: + "200": + description: Series returned matching the provided label matchers. + content: + application/json: + schema: + $ref: '#/components/schemas/SeriesOutputBody' + examples: + seriesList: + summary: List of series matching the selector + value: + data: + - __name__: up + env: demo + instance: demo.prometheus.io:8080 + job: cadvisor + - __name__: up + env: demo + instance: demo.prometheus.io:9093 + job: alertmanager + - __name__: up + env: demo + instance: demo.prometheus.io:9100 + job: node + - __name__: up + instance: demo.prometheus.io:3000 + job: grafana + - __name__: up + instance: demo.prometheus.io:8996 + job: random + status: success + default: + description: Error retrieving series. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - series + summary: Find series by label matchers + operationId: series-post + requestBody: + description: Submit a series query. This endpoint accepts the same parameters as the GET version. 
+ content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/SeriesPostInputBody' + examples: + seriesMatch: + summary: Find series by label matchers + value: + match[]: + - up + seriesWithTimeRange: + summary: Find series with time range + value: + end: "2026-01-02T13:37:00.000Z" + match[]: + - up + - process_cpu_seconds_total{job="prometheus"} + start: "2026-01-02T12:37:00.000Z" + required: true + responses: + "200": + description: Series returned matching the provided label matchers via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/SeriesOutputBody' + examples: + seriesList: + summary: List of series matching the selector + value: + data: + - __name__: up + env: demo + instance: demo.prometheus.io:8080 + job: cadvisor + - __name__: up + env: demo + instance: demo.prometheus.io:9093 + job: alertmanager + - __name__: up + env: demo + instance: demo.prometheus.io:9100 + job: node + - __name__: up + instance: demo.prometheus.io:3000 + job: grafana + - __name__: up + instance: demo.prometheus.io:8996 + job: random + status: success + default: + description: Error retrieving series via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + delete: + tags: + - series + summary: Delete series + description: 'Delete series matching selectors. Note: This is deprecated, use POST /admin/tsdb/delete_series instead.' + operationId: delete-series + responses: + "200": + description: Series marked for deletion. + content: + application/json: + schema: + $ref: '#/components/schemas/SeriesDeleteOutputBody' + examples: + seriesDeleted: + summary: Series marked for deletion + value: + status: success + default: + description: Error deleting series. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /metadata: + get: + tags: + - metadata + summary: Get metadata + operationId: get-metadata + parameters: + - name: limit + in: query + description: The maximum number of metrics to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + - name: limit_per_metric + in: query + description: The maximum number of metadata entries per metric. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 10 + - name: metric + in: query + description: A metric name to filter metadata for. + required: false + explode: false + schema: + type: string + examples: + example: + value: http_requests_total + responses: + "200": + description: Metric metadata retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/MetadataOutputBody' + examples: + metricMetadata: + summary: Metadata for metrics + value: + data: + go_gc_stack_starting_size_bytes: + - help: The stack size of new goroutines. Sourced from /gc/stack/starting-size:bytes. + type: gauge + unit: "" + prometheus_rule_group_iterations_missed_total: + - help: The total number of rule group evaluations missed due to slow rule group evaluation. + type: counter + unit: "" + prometheus_sd_updates_total: + - help: Total number of update events sent to the SD consumers. + type: counter + unit: "" + status: success + default: + description: Error retrieving metadata. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /scrape_pools: + get: + tags: + - targets + summary: Get scrape pools + operationId: get-scrape-pools + responses: + "200": + description: Scrape pools retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/ScrapePoolsOutputBody' + examples: + scrapePoolsList: + summary: List of scrape pool names + value: + data: + scrapePools: + - alertmanager + - blackbox + - caddy + - cadvisor + - grafana + - node + - prometheus + - random + status: success + default: + description: Error retrieving scrape pools. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /targets: + get: + tags: + - targets + summary: Get targets + operationId: get-targets + parameters: + - name: scrapePool + in: query + description: Filter targets by scrape pool name. + required: false + explode: false + schema: + type: string + examples: + example: + value: prometheus + - name: state + in: query + description: 'Filter by state: active, dropped, or any.' + required: false + explode: false + schema: + type: string + examples: + example: + value: active + responses: + "200": + description: Target discovery information retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/TargetsOutputBody' + examples: + targetsList: + summary: Active and dropped targets + value: + data: + activeTargets: + - discoveredLabels: + __address__: demo.prometheus.io:9093 + __meta_filepath: /etc/prometheus/file_sd/alertmanager.yml + __metrics_path__: /metrics + __scheme__: http + env: demo + job: alertmanager + globalUrl: http://demo.prometheus.io:9093/metrics + health: up + labels: + env: demo + instance: demo.prometheus.io:9093 + job: alertmanager + lastError: "" + lastScrape: "2026-01-02T13:36:40.200Z" + lastScrapeDuration: 0.006576866 + scrapeInterval: 15s + scrapePool: alertmanager + scrapeTimeout: 10s + scrapeUrl: http://demo.prometheus.io:9093/metrics + droppedTargetCounts: + alertmanager: 0 + blackbox: 0 + caddy: 0 + cadvisor: 0 + grafana: 0 + node: 0 + prometheus: 0 + random: 0 + droppedTargets: [] + status: success + default: + description: Error retrieving targets. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /targets/metadata: + get: + tags: + - targets + summary: Get targets metadata + operationId: get-targets-metadata + parameters: + - name: match_target + in: query + description: Label selector to filter targets. + required: false + explode: false + schema: + type: string + examples: + example: + value: '{job="prometheus"}' + - name: metric + in: query + description: Metric name to retrieve metadata for. + required: false + explode: false + schema: + type: string + examples: + example: + value: http_requests_total + - name: limit + in: query + description: Maximum number of targets to match. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 10 + responses: + "200": + description: Target metadata retrieved successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/TargetMetadataOutputBody' + examples: + targetMetadata: + summary: Metadata for targets + value: + data: + - help: The current health status of the target + metric: up + target: + instance: localhost:9090 + job: prometheus + type: gauge + unit: "" + status: success + default: + description: Error retrieving target metadata. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /targets/relabel_steps: + get: + tags: + - targets + summary: Get targets relabel steps + operationId: get-targets-relabel-steps + parameters: + - name: scrapePool + in: query + description: Name of the scrape pool. + required: true + explode: false + schema: + type: string + examples: + example: + value: prometheus + - name: labels + in: query + description: JSON-encoded labels to apply relabel rules to. + required: true + explode: false + schema: + type: string + examples: + example: + value: '{"__address__":"localhost:9090","job":"prometheus"}' + responses: + "200": + description: Relabel steps retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/TargetRelabelStepsOutputBody' + examples: + relabelSteps: + summary: Relabel steps for a target + value: + data: + steps: + - keep: true + output: + __address__: localhost:9090 + instance: localhost:9090 + job: prometheus + rule: + action: replace + regex: (.*) + replacement: $1 + source_labels: + - __address__ + target_label: instance + status: success + default: + description: Error retrieving relabel steps. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /rules: + get: + tags: + - rules + summary: Get alerting and recording rules + operationId: rules + parameters: + - name: type + in: query + description: 'Filter by rule type: alert or record.' + required: false + explode: false + schema: + type: string + examples: + example: + value: alert + - name: rule_name[] + in: query + description: Filter by rule name. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - HighErrorRate + - name: rule_group[] + in: query + description: Filter by rule group name. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - example_alerts + - name: file[] + in: query + description: Filter by file path. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - /etc/prometheus/rules.yml + - name: match[] + in: query + description: Label matchers to filter rules. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{severity="critical"}' + - name: exclude_alerts + in: query + description: Exclude active alerts from response. + required: false + explode: false + schema: + type: string + examples: + example: + value: "false" + - name: group_limit + in: query + description: Maximum number of rule groups to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + - name: group_next_token + in: query + description: Pagination token for next page. 
+ required: false + explode: false + schema: + type: string + examples: + example: + value: abc123 + responses: + "200": + description: Rules retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/RulesOutputBody' + examples: + ruleGroups: + summary: Alerting and recording rules + value: + data: + groups: + - evaluationTime: 0.000561635 + file: /etc/prometheus/rules/ansible_managed.yml + interval: 15 + lastEvaluation: "2026-01-02T13:36:56.874Z" + limit: 0 + name: ansible managed alert rules + rules: + - annotations: + description: This is an alert meant to ensure that the entire alerting pipeline is functional. This alert is always firing, therefore it should always be firing in Alertmanager and always fire against a receiver. There are integrations with various notification mechanisms that send a notification when this alert is not firing. For example the "DeadMansSnitch" integration in PagerDuty. + summary: Ensure entire alerting pipeline is functional + duration: 600 + evaluationTime: 0.000356688 + health: ok + keepFiringFor: 0 + labels: + severity: warning + lastEvaluation: "2026-01-02T13:36:56.874Z" + name: Watchdog + query: vector(1) + state: firing + type: alerting + status: success + default: + description: Error retrieving rules. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /alerts: + get: + tags: + - alerts + summary: Get active alerts + operationId: alerts + responses: + "200": + description: Active alerts retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/AlertsOutputBody' + examples: + activeAlerts: + summary: Currently active alerts + value: + data: + alerts: + - activeAt: "2026-01-02T13:30:00.000Z" + annotations: + description: This is an alert meant to ensure that the entire alerting pipeline is functional. This alert is always firing, therefore it should always be firing in Alertmanager and always fire against a receiver. There are integrations with various notification mechanisms that send a notification when this alert is not firing. For example the "DeadMansSnitch" integration in PagerDuty. + summary: Ensure entire alerting pipeline is functional + labels: + alertname: Watchdog + severity: warning + state: firing + value: "1e+00" + status: success + default: + description: Error retrieving alerts. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /alertmanagers: + get: + tags: + - alerts + summary: Get Alertmanager discovery + operationId: alertmanagers + responses: + "200": + description: Alertmanager targets retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/AlertmanagersOutputBody' + examples: + alertmanagerDiscovery: + summary: Alertmanager discovery results + value: + data: + activeAlertmanagers: + - url: http://demo.prometheus.io:9093/api/v2/alerts + droppedAlertmanagers: [] + status: success + default: + description: Error retrieving Alertmanager targets. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/config: + get: + tags: + - status + summary: Get status config + operationId: get-status-config + responses: + "200": + description: Configuration retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusConfigOutputBody' + examples: + configYAML: + summary: Prometheus configuration + value: + data: + yaml: | + global: + scrape_interval: 15s + scrape_timeout: 10s + evaluation_interval: 15s + external_labels: + environment: demo-prometheus-io + alerting: + alertmanagers: + - scheme: http + static_configs: + - targets: + - demo.prometheus.io:9093 + rule_files: + - /etc/prometheus/rules/*.yml + status: success + default: + description: Error retrieving configuration. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/runtimeinfo: + get: + tags: + - status + summary: Get status runtimeinfo + operationId: get-status-runtimeinfo + responses: + "200": + description: Runtime information retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusRuntimeInfoOutputBody' + examples: + runtimeInfo: + summary: Runtime information + value: + data: + CWD: / + GODEBUG: "" + GOGC: "75" + GOMAXPROCS: 2 + GOMEMLIMIT: 3703818240 + corruptionCount: 0 + goroutineCount: 88 + hostname: demo-prometheus-io + lastConfigTime: "2026-01-01T13:37:00.000Z" + reloadConfigSuccess: true + serverTime: "2026-01-02T13:37:00.000Z" + startTime: "2026-01-01T13:37:00.000Z" + storageRetention: 31d + status: success + default: + description: Error retrieving runtime information. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/buildinfo: + get: + tags: + - status + summary: Get status buildinfo + operationId: get-status-buildinfo + responses: + "200": + description: Build information retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusBuildInfoOutputBody' + examples: + buildInfo: + summary: Build information + value: + data: + branch: HEAD + buildDate: 20251030-07:26:10 + buildUser: root@08c890a84441 + goVersion: go1.25.3 + revision: 0a41f0000705c69ab8e0f9a723fc73e39ed62b07 + version: 3.7.3 + status: success + default: + description: Error retrieving build information. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/flags: + get: + tags: + - status + summary: Get status flags + operationId: get-status-flags + responses: + "200": + description: Command-line flags retrieved successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/StatusFlagsOutputBody' + examples: + flags: + summary: Command-line flags + value: + data: + agent: "false" + alertmanager.notification-queue-capacity: "10000" + config.file: /etc/prometheus/prometheus.yml + enable-feature: exemplar-storage,native-histograms + query.max-concurrency: "20" + query.timeout: 2m + storage.tsdb.path: /prometheus + storage.tsdb.retention.time: 15d + web.console.libraries: /usr/share/prometheus/console_libraries + web.console.templates: /usr/share/prometheus/consoles + web.enable-admin-api: "true" + web.enable-lifecycle: "true" + web.listen-address: 0.0.0.0:9090 + web.page-title: Prometheus Time Series Collection and Processing Server + status: success + default: + description: Error retrieving flags. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/tsdb: + get: + tags: + - status + summary: Get TSDB status + operationId: status-tsdb + parameters: + - name: limit + in: query + description: The maximum number of items to return per category. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 10 + responses: + "200": + description: TSDB status retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusTSDBOutputBody' + examples: + tsdbStats: + summary: TSDB statistics + value: + data: + headStats: + chunkCount: 37525 + maxTime: 1767436620000 + minTime: 1767362400712 + numLabelPairs: 2512 + numSeries: 9925 + labelValueCountByLabelName: + - name: __name__ + value: 5 + - name: job + value: 3 + memoryInBytesByLabelName: + - name: __name__ + value: 1024 + - name: job + value: 512 + seriesCountByLabelValuePair: + - name: job=prometheus + value: 100 + - name: instance=localhost:9090 + value: 100 + seriesCountByMetricName: + - name: up + value: 100 + - name: http_requests_total + value: 500 + status: success + default: + description: Error retrieving TSDB status. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/tsdb/blocks: + get: + tags: + - status + summary: Get TSDB blocks information + operationId: status-tsdb-blocks + responses: + "200": + description: TSDB blocks information retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusTSDBBlocksOutputBody' + examples: + tsdbBlocks: + summary: TSDB block information + value: + data: + blocks: + - compaction: + level: 4 + sources: + - 01KBCJ7TR8A4QAJ3AA1J651P5S + - 01KBCS3J0E34567YPB8Y5W0E24 + - 01KBCZZ9KRTYGG3E7HVQFGC3S3 + maxTime: 1764763200000 + minTime: 1764568801099 + stats: + numChunks: 1073962 + numSamples: 129505582 + numSeries: 10661 + ulid: 01KC4D6GXQA4CRHYKV78NEBVAE + version: 1 + status: success + default: + description: Error retrieving TSDB blocks. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/walreplay: + get: + tags: + - status + summary: Get status walreplay + operationId: get-status-walreplay + responses: + "200": + description: WAL replay status retrieved successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/StatusWALReplayOutputBody' + examples: + walReplay: + summary: WAL replay status + value: + data: + current: 3214 + max: 3214 + min: 3209 + status: success + default: + description: Error retrieving WAL replay status. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /admin/tsdb/delete_series: + put: + tags: + - admin + summary: Delete series matching selectors via PUT + description: Deletes data for a selection of series in a time range using PUT method. + operationId: deleteSeriesPut + parameters: + - name: match[] + in: query + description: Series selectors to identify series to delete. + required: true + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{__name__=~"test.*"}' + - name: start + in: query + description: Start timestamp for deletion. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for deletion. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + responses: + "200": + description: Series deleted successfully via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/DeleteSeriesOutputBody' + examples: + deletionSuccess: + summary: Successful series deletion + value: + status: success + default: + description: Error deleting series via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - admin + summary: Delete series matching selectors + description: Deletes data for a selection of series in a time range. + operationId: deleteSeriesPost + parameters: + - name: match[] + in: query + description: Series selectors to identify series to delete. + required: true + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{__name__=~"test.*"}' + - name: start + in: query + description: Start timestamp for deletion. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for deletion. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. 
+ examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + responses: + "200": + description: Series deleted successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/DeleteSeriesOutputBody' + examples: + deletionSuccess: + summary: Successful series deletion + value: + status: success + default: + description: Error deleting series. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /admin/tsdb/clean_tombstones: + put: + tags: + - admin + summary: Clean tombstones in the TSDB via PUT + description: Removes deleted data from disk and cleans up existing tombstones using PUT method. + operationId: cleanTombstonesPut + responses: + "200": + description: Tombstones cleaned successfully via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/CleanTombstonesOutputBody' + examples: + tombstonesCleaned: + summary: Tombstones cleaned successfully + value: + status: success + default: + description: Error cleaning tombstones via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - admin + summary: Clean tombstones in the TSDB + description: Removes deleted data from disk and cleans up existing tombstones. + operationId: cleanTombstonesPost + responses: + "200": + description: Tombstones cleaned successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/CleanTombstonesOutputBody' + examples: + tombstonesCleaned: + summary: Tombstones cleaned successfully + value: + status: success + default: + description: Error cleaning tombstones. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /admin/tsdb/snapshot: + put: + tags: + - admin + summary: Create a snapshot of the TSDB via PUT + description: Creates a snapshot of all current data using PUT method. + operationId: snapshotPut + parameters: + - name: skip_head + in: query + description: If true, do not snapshot data in the head block. + required: false + explode: false + schema: + type: string + examples: + example: + value: "false" + responses: + "200": + description: Snapshot created successfully via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/SnapshotOutputBody' + examples: + snapshotCreated: + summary: Snapshot created successfully + value: + data: + name: 20260102T133700Z-a1b2c3d4e5f67890 + status: success + default: + description: Error creating snapshot via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - admin + summary: Create a snapshot of the TSDB + description: Creates a snapshot of all current data. + operationId: snapshotPost + parameters: + - name: skip_head + in: query + description: If true, do not snapshot data in the head block. + required: false + explode: false + schema: + type: string + examples: + example: + value: "false" + responses: + "200": + description: Snapshot created successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/SnapshotOutputBody' + examples: + snapshotCreated: + summary: Snapshot created successfully + value: + data: + name: 20260102T133700Z-a1b2c3d4e5f67890 + status: success + default: + description: Error creating snapshot. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /read: + post: + tags: + - remote + summary: Remote read endpoint + description: Prometheus remote read endpoint for federated queries. Accepts and returns Protocol Buffer encoded data. + operationId: remoteRead + responses: + "204": + description: No Content + default: + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /write: + post: + tags: + - remote + summary: Remote write endpoint + description: Prometheus remote write endpoint for sending metrics. Accepts Protocol Buffer encoded write requests. + operationId: remoteWrite + responses: + "204": + description: No Content + default: + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /otlp/v1/metrics: + post: + tags: + - otlp + summary: OTLP metrics write endpoint + description: OpenTelemetry Protocol metrics ingestion endpoint. Accepts OTLP/HTTP metrics in Protocol Buffer format. + operationId: otlpWrite + responses: + "204": + description: No Content + default: + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /notifications: + get: + tags: + - notifications + summary: Get notifications + operationId: get-notifications + responses: + "200": + description: Notifications retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationsOutputBody' + examples: + notifications: + summary: Server notifications + value: + data: + - active: true + date: "2026-01-02T16:14:50.046Z" + text: Configuration reload has failed. + status: success + default: + description: Error retrieving notifications. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /features: + get: + tags: + - features + summary: Get features + operationId: get-features + responses: + "200": + description: Feature flags retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/FeaturesOutputBody' + examples: + enabledFeatures: + summary: Enabled feature flags + value: + data: + - exemplar-storage + - remote-write-receiver + status: success + default: + description: Error retrieving features. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error +components: + schemas: + Error: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + errorType: + type: string + description: Type of error that occurred. + example: bad_data + error: + type: string + description: Human-readable error message. + example: invalid parameter + required: + - status + - errorType + - error + additionalProperties: false + description: Error response. 
+ Labels: + type: object + additionalProperties: true + description: Label set represented as a key-value map. + QueryOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/QueryData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for instant query. + QueryRangeOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/QueryData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for range query. + QueryPostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The PromQL query to execute.' + example: up + time: + type: string + description: 'Form field: The evaluation timestamp (optional, defaults to current time).' + example: "2023-07-21T20:10:51.781Z" + limit: + type: integer + format: int64 + description: 'Form field: The maximum number of metrics to return.' + example: 100 + timeout: + type: string + description: 'Form field: Evaluation timeout (optional, defaults to and is capped by the value of the -query.timeout flag).' + example: 30s + lookback_delta: + type: string + description: 'Form field: Override the lookback period for this query (optional).' + example: 5m + stats: + type: string + description: 'Form field: When provided, include query statistics in the response (the special value ''all'' enables more comprehensive statistics).' + example: all + required: + - query + additionalProperties: false + description: POST request body for instant query. + QueryRangePostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The query to execute.' + example: rate(http_requests_total[5m]) + start: + type: string + description: 'Form field: The start time of the query.' + example: "2023-07-21T20:10:30.781Z" + end: + type: string + description: 'Form field: The end time of the query.' + example: "2023-07-21T20:20:30.781Z" + step: + type: string + description: 'Form field: The step size of the query.' + example: 15s + limit: + type: integer + format: int64 + description: 'Form field: The maximum number of metrics to return.' + example: 100 + timeout: + type: string + description: 'Form field: Evaluation timeout (optional, defaults to and is capped by the value of the -query.timeout flag).' + example: 30s + lookback_delta: + type: string + description: 'Form field: Override the lookback period for this query (optional).' + example: 5m + stats: + type: string + description: 'Form field: When provided, include query statistics in the response (the special value ''all'' enables more comprehensive statistics).' 
+ example: all + required: + - query + - start + - end + - step + additionalProperties: false + description: POST request body for range query. + QueryExemplarsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + description: Response data (structure varies by endpoint). + example: + result: ok + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Generic response body. + QueryExemplarsPostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The query to execute.' + example: http_requests_total + start: + type: string + description: 'Form field: The start time of the query.' + example: "2023-07-21T20:00:00.000Z" + end: + type: string + description: 'Form field: The end time of the query.' + example: "2023-07-21T21:00:00.000Z" + required: + - query + additionalProperties: false + description: POST request body for exemplars query. + FormatQueryOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: string + description: Formatted query string. + example: sum by(status) (rate(http_requests_total[5m])) + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for format query endpoint. + FormatQueryPostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The query to format.' + example: sum(rate(http_requests_total[5m])) by (status) + required: + - query + additionalProperties: false + description: POST request body for format query. + ParseQueryOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + description: Response data (structure varies by endpoint). + example: + result: ok + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Generic response body. + ParseQueryPostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The query to parse.' + example: sum(rate(http_requests_total[5m])) + required: + - query + additionalProperties: false + description: POST request body for parse query. + QueryData: + anyOf: + - type: object + properties: + resultType: + type: string + enum: + - vector + result: + type: array + items: + anyOf: + - $ref: '#/components/schemas/FloatSample' + - $ref: '#/components/schemas/HistogramSample' + description: Array of samples (either float or histogram). 
+ required: + - resultType + - result + additionalProperties: false + - type: object + properties: + resultType: + type: string + enum: + - matrix + result: + type: array + items: + anyOf: + - $ref: '#/components/schemas/FloatSeries' + - $ref: '#/components/schemas/HistogramSeries' + description: Array of time series (either float or histogram). + required: + - resultType + - result + additionalProperties: false + - type: object + properties: + resultType: + type: string + enum: + - scalar + result: + type: array + items: + oneOf: + - type: number + - type: string + maxItems: 2 + minItems: 2 + description: Scalar value as [timestamp, stringValue]. + required: + - resultType + - result + additionalProperties: false + - type: object + properties: + resultType: + type: string + enum: + - string + result: + type: array + items: + type: string + maxItems: 2 + minItems: 2 + description: String value as [timestamp, stringValue]. + required: + - resultType + - result + additionalProperties: false + description: Query result data. The structure of 'result' depends on 'resultType'. + example: + result: + - metric: + __name__: up + job: prometheus + value: + - 1627845600 + - "1" + resultType: vector + FloatSample: + type: object + properties: + metric: + $ref: '#/components/schemas/Labels' + value: + type: array + items: + oneOf: + - type: number + - type: string + maxItems: 2 + minItems: 2 + description: Timestamp and float value as [unixTimestamp, stringValue]. + example: + - 1767436620 + - "1" + required: + - metric + - value + additionalProperties: false + description: A sample with a float value. + HistogramSample: + type: object + properties: + metric: + $ref: '#/components/schemas/Labels' + histogram: + type: array + items: + oneOf: + - type: number + - $ref: '#/components/schemas/HistogramValue' + maxItems: 2 + minItems: 2 + description: Timestamp and histogram value as [unixTimestamp, histogramObject]. + example: + - 1767436620 + - buckets: [] + count: "60" + sum: "120" + required: + - metric + - histogram + additionalProperties: false + description: A sample with a native histogram value. + FloatSeries: + type: object + properties: + metric: + $ref: '#/components/schemas/Labels' + values: + type: array + items: + type: array + items: + oneOf: + - type: number + - type: string + maxItems: 2 + minItems: 2 + description: Array of [timestamp, stringValue] pairs for float values. + required: + - metric + - values + additionalProperties: false + description: A time series with float values. + HistogramSeries: + type: object + properties: + metric: + $ref: '#/components/schemas/Labels' + histograms: + type: array + items: + type: array + items: + oneOf: + - type: number + - $ref: '#/components/schemas/HistogramValue' + maxItems: 2 + minItems: 2 + description: Array of [timestamp, histogramObject] pairs for histogram values. + required: + - metric + - histograms + additionalProperties: false + description: A time series with native histogram values. + HistogramValue: + type: object + properties: + count: + type: string + description: Total count of observations. + sum: + type: string + description: Sum of all observed values. + buckets: + type: array + items: + type: array + items: + oneOf: + - type: number + - type: string + description: Histogram buckets as [boundary_rule, lower, upper, count]. + required: + - count + - sum + additionalProperties: false + description: Native histogram value representation. 
+ LabelsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + type: string + example: + - __name__ + - job + - instance + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of strings. + LabelsPostInputBody: + type: object + properties: + start: + type: string + description: 'Form field: The start time of the query.' + example: "2023-07-21T20:00:00.000Z" + end: + type: string + description: 'Form field: The end time of the query.' + example: "2023-07-21T21:00:00.000Z" + match[]: + type: array + items: + type: string + description: 'Form field: Series selector argument that selects the series from which to read the label names.' + example: + - '{job="prometheus"}' + limit: + type: integer + format: int64 + description: 'Form field: The maximum number of label names to return.' + example: 100 + additionalProperties: false + description: POST request body for labels query. + LabelValuesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + type: string + example: + - __name__ + - job + - instance + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of strings. + SeriesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + $ref: '#/components/schemas/Labels' + example: + - __name__: up + instance: localhost:9090 + job: prometheus + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of label sets. + SeriesPostInputBody: + type: object + properties: + start: + type: string + description: 'Form field: The start time of the query.' + example: "2023-07-21T20:00:00.000Z" + end: + type: string + description: 'Form field: The end time of the query.' + example: "2023-07-21T21:00:00.000Z" + match[]: + type: array + items: + type: string + description: 'Form field: Series selector argument that selects the series to return.' + example: + - '{job="prometheus"}' + limit: + type: integer + format: int64 + description: 'Form field: The maximum number of series to return.' + example: 100 + required: + - match[] + additionalProperties: false + description: POST request body for series query. 
+ SeriesDeleteOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + description: Response data (structure varies by endpoint). + example: + result: ok + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Generic response body. + Metadata: + type: object + properties: + type: + type: string + description: Metric type (counter, gauge, histogram, summary, or untyped). + unit: + type: string + description: Unit of the metric. + help: + type: string + description: Help text describing the metric. + required: + - type + - unit + - help + additionalProperties: false + description: Metric metadata. + MetadataOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: object + additionalProperties: + type: array + items: + $ref: '#/components/schemas/Metadata' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for metadata endpoint. + MetricMetadata: + type: object + properties: + target: + $ref: '#/components/schemas/Labels' + metric: + type: string + description: Metric name. + type: + type: string + description: Metric type (counter, gauge, histogram, summary, or untyped). + help: + type: string + description: Help text describing the metric. + unit: + type: string + description: Unit of the metric. + required: + - target + - type + - help + - unit + additionalProperties: false + description: Target metric metadata. + Target: + type: object + properties: + discoveredLabels: + $ref: '#/components/schemas/Labels' + labels: + $ref: '#/components/schemas/Labels' + scrapePool: + type: string + description: Name of the scrape pool. + scrapeUrl: + type: string + description: URL of the target. + globalUrl: + type: string + description: Global URL of the target. + lastError: + type: string + description: Last error message from scraping. + lastScrape: + type: string + format: date-time + description: Timestamp of the last scrape. + lastScrapeDuration: + type: number + format: double + description: Duration of the last scrape in seconds. + health: + type: string + description: Health status of the target (up, down, or unknown). + scrapeInterval: + type: string + description: Scrape interval for this target. + scrapeTimeout: + type: string + description: Scrape timeout for this target. + required: + - discoveredLabels + - labels + - scrapePool + - scrapeUrl + - globalUrl + - lastError + - lastScrape + - lastScrapeDuration + - health + - scrapeInterval + - scrapeTimeout + additionalProperties: false + description: Scrape target information. + DroppedTarget: + type: object + properties: + discoveredLabels: + $ref: '#/components/schemas/Labels' + scrapePool: + type: string + description: Name of the scrape pool. 
+ required: + - discoveredLabels + - scrapePool + additionalProperties: false + description: Dropped target information. + TargetDiscovery: + type: object + properties: + activeTargets: + type: array + items: + $ref: '#/components/schemas/Target' + droppedTargets: + type: array + items: + $ref: '#/components/schemas/DroppedTarget' + droppedTargetCounts: + type: object + additionalProperties: + type: integer + format: int64 + required: + - activeTargets + - droppedTargets + - droppedTargetCounts + additionalProperties: false + description: Target discovery information including active and dropped targets. + TargetsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/TargetDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for targets endpoint. + TargetMetadataOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + $ref: '#/components/schemas/MetricMetadata' + example: + - help: The current health status of the target + metric: up + target: + instance: localhost:9090 + job: prometheus + type: gauge + unit: "" + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of metric metadata. + ScrapePoolsDiscovery: + type: object + properties: + scrapePools: + type: array + items: + type: string + required: + - scrapePools + additionalProperties: false + description: List of all configured scrape pools. + ScrapePoolsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/ScrapePoolsDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for scrape pools endpoint. + Config: + type: object + properties: + source_labels: + type: array + items: + type: string + description: Source labels for relabeling. + separator: + type: string + description: Separator for source label values. + regex: + type: string + description: Regular expression for matching. + modulus: + type: integer + format: int64 + description: Modulus for hash-based relabeling. + target_label: + type: string + description: Target label name. + replacement: + type: string + description: Replacement value. + action: + type: string + description: Relabel action. 
+ additionalProperties: false + description: Relabel configuration. + RelabelStep: + type: object + properties: + rule: + $ref: '#/components/schemas/Config' + output: + $ref: '#/components/schemas/Labels' + keep: + type: boolean + required: + - rule + - output + - keep + additionalProperties: false + description: Relabel step showing the rule, output, and whether the target was kept. + RelabelStepsResponse: + type: object + properties: + steps: + type: array + items: + $ref: '#/components/schemas/RelabelStep' + required: + - steps + additionalProperties: false + description: Relabeling steps response. + TargetRelabelStepsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/RelabelStepsResponse' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for target relabel steps endpoint. + RuleGroup: + type: object + properties: + name: + type: string + description: Name of the rule group. + file: + type: string + description: File containing the rule group. + rules: + type: array + items: + type: object + description: Rule definition. + description: Rules in this group. + interval: + type: number + format: double + description: Evaluation interval in seconds. + limit: + type: integer + format: int64 + description: Maximum number of alerts for this group. + evaluationTime: + type: number + format: double + description: Time taken to evaluate the group in seconds. + lastEvaluation: + type: string + format: date-time + description: Timestamp of the last evaluation. + required: + - name + - file + - rules + - interval + - limit + - evaluationTime + - lastEvaluation + additionalProperties: false + description: Rule group information. + RuleDiscovery: + type: object + properties: + groups: + type: array + items: + $ref: '#/components/schemas/RuleGroup' + groupNextToken: + type: string + description: Pagination token for the next page of groups. + required: + - groups + additionalProperties: false + description: Rule discovery information containing all rule groups. + RulesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/RuleDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for rules endpoint. + Alert: + type: object + properties: + labels: + $ref: '#/components/schemas/Labels' + annotations: + $ref: '#/components/schemas/Labels' + state: + type: string + description: State of the alert (pending, firing, or inactive). + value: + type: string + description: Value of the alert expression. + activeAt: + type: string + format: date-time + description: Timestamp when the alert became active. 
+ keepFiringSince: + type: string + format: date-time + description: Timestamp since the alert has been kept firing. + required: + - labels + - annotations + - state + - value + additionalProperties: false + description: Alert information. + AlertDiscovery: + type: object + properties: + alerts: + type: array + items: + $ref: '#/components/schemas/Alert' + required: + - alerts + additionalProperties: false + description: Alert discovery information containing all active alerts. + AlertsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/AlertDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for alerts endpoint. + AlertmanagerTarget: + type: object + properties: + url: + type: string + description: URL of the Alertmanager instance. + required: + - url + additionalProperties: false + description: Alertmanager target information. + AlertmanagerDiscovery: + type: object + properties: + activeAlertmanagers: + type: array + items: + $ref: '#/components/schemas/AlertmanagerTarget' + droppedAlertmanagers: + type: array + items: + $ref: '#/components/schemas/AlertmanagerTarget' + required: + - activeAlertmanagers + - droppedAlertmanagers + additionalProperties: false + description: Alertmanager discovery information including active and dropped instances. + AlertmanagersOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/AlertmanagerDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for alertmanagers endpoint. + StatusConfigData: + type: object + properties: + yaml: + type: string + description: Prometheus configuration in YAML format. + required: + - yaml + additionalProperties: false + description: Prometheus configuration. + StatusConfigOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/StatusConfigData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status config endpoint. 
+ RuntimeInfo: + type: object + properties: + startTime: + type: string + format: date-time + CWD: + type: string + hostname: + type: string + serverTime: + type: string + format: date-time + reloadConfigSuccess: + type: boolean + lastConfigTime: + type: string + format: date-time + corruptionCount: + type: integer + format: int64 + goroutineCount: + type: integer + format: int64 + GOMAXPROCS: + type: integer + format: int64 + GOMEMLIMIT: + type: integer + format: int64 + GOGC: + type: string + GODEBUG: + type: string + storageRetention: + type: string + required: + - startTime + - CWD + - hostname + - serverTime + - reloadConfigSuccess + - lastConfigTime + - corruptionCount + - goroutineCount + - GOMAXPROCS + - GOMEMLIMIT + - GOGC + - GODEBUG + - storageRetention + additionalProperties: false + description: Prometheus runtime information. + StatusRuntimeInfoOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/RuntimeInfo' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status runtime info endpoint. + PrometheusVersion: + type: object + properties: + version: + type: string + revision: + type: string + branch: + type: string + buildUser: + type: string + buildDate: + type: string + goVersion: + type: string + required: + - version + - revision + - branch + - buildUser + - buildDate + - goVersion + additionalProperties: false + description: Prometheus version information. + StatusBuildInfoOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/PrometheusVersion' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status build info endpoint. + StatusFlagsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: object + additionalProperties: + type: string + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status flags endpoint. 
+ HeadStats: + type: object + properties: + numSeries: + type: integer + format: int64 + numLabelPairs: + type: integer + format: int64 + chunkCount: + type: integer + format: int64 + minTime: + type: integer + format: int64 + maxTime: + type: integer + format: int64 + required: + - numSeries + - numLabelPairs + - chunkCount + - minTime + - maxTime + additionalProperties: false + description: TSDB head statistics. + TSDBStat: + type: object + properties: + name: + type: string + value: + type: integer + format: int64 + required: + - name + - value + additionalProperties: false + description: TSDB statistic. + TSDBStatus: + type: object + properties: + headStats: + $ref: '#/components/schemas/HeadStats' + seriesCountByMetricName: + type: array + items: + $ref: '#/components/schemas/TSDBStat' + labelValueCountByLabelName: + type: array + items: + $ref: '#/components/schemas/TSDBStat' + memoryInBytesByLabelName: + type: array + items: + $ref: '#/components/schemas/TSDBStat' + seriesCountByLabelValuePair: + type: array + items: + $ref: '#/components/schemas/TSDBStat' + required: + - headStats + - seriesCountByMetricName + - labelValueCountByLabelName + - memoryInBytesByLabelName + - seriesCountByLabelValuePair + additionalProperties: false + description: TSDB status information. + StatusTSDBOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/TSDBStatus' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status TSDB endpoint. + BlockDesc: + type: object + properties: + ulid: + type: string + minTime: + type: integer + format: int64 + maxTime: + type: integer + format: int64 + required: + - ulid + - minTime + - maxTime + additionalProperties: false + description: Block descriptor. + BlockStats: + type: object + properties: + numSamples: + type: integer + format: int64 + numSeries: + type: integer + format: int64 + numChunks: + type: integer + format: int64 + numTombstones: + type: integer + format: int64 + numFloatSamples: + type: integer + format: int64 + numHistogramSamples: + type: integer + format: int64 + additionalProperties: false + description: Block statistics. + BlockMetaCompaction: + type: object + properties: + level: + type: integer + format: int64 + sources: + type: array + items: + type: string + parents: + type: array + items: + $ref: '#/components/schemas/BlockDesc' + failed: + type: boolean + deletable: + type: boolean + hints: + type: array + items: + type: string + required: + - level + additionalProperties: false + description: Block compaction metadata. + BlockMeta: + type: object + properties: + ulid: + type: string + minTime: + type: integer + format: int64 + maxTime: + type: integer + format: int64 + stats: + $ref: '#/components/schemas/BlockStats' + compaction: + $ref: '#/components/schemas/BlockMetaCompaction' + version: + type: integer + format: int64 + required: + - ulid + - minTime + - maxTime + - compaction + - version + additionalProperties: false + description: Block metadata. 
+ StatusTSDBBlocksData: + type: object + properties: + blocks: + type: array + items: + $ref: '#/components/schemas/BlockMeta' + required: + - blocks + additionalProperties: false + description: TSDB blocks information. + StatusTSDBBlocksOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/StatusTSDBBlocksData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status TSDB blocks endpoint. + StatusWALReplayData: + type: object + properties: + min: + type: integer + format: int64 + max: + type: integer + format: int64 + current: + type: integer + format: int64 + required: + - min + - max + - current + additionalProperties: false + description: WAL replay status. + StatusWALReplayOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/StatusWALReplayData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status WAL replay endpoint. + DeleteSeriesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + additionalProperties: false + description: Response body containing only status. + CleanTombstonesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + additionalProperties: false + description: Response body containing only status. + DataStruct: + type: object + properties: + name: + type: string + required: + - name + additionalProperties: false + description: Generic data structure with a name field. + SnapshotOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/DataStruct' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. 
+ infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for snapshot endpoint. + Notification: + type: object + properties: + text: + type: string + date: + type: string + format: date-time + active: + type: boolean + required: + - text + - date + - active + additionalProperties: false + description: Server notification. + NotificationsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + $ref: '#/components/schemas/Notification' + example: + - active: true + date: "2023-07-21T20:00:00.000Z" + text: Server is running + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of notifications. + FeaturesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + description: Response data (structure varies by endpoint). + example: + result: ok + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Generic response body. +tags: + - name: query + description: Query and evaluate PromQL expressions. + - name: metadata + description: Retrieve metric metadata such as type and unit. + - name: labels + description: Query label names and values. + - name: series + description: Query and manage time series. + - name: targets + description: Retrieve target and scrape pool information. + - name: rules + description: Query recording and alerting rules. + - name: alerts + description: Query active alerts and alertmanager discovery. + - name: status + description: Retrieve server status and configuration. + - name: admin + description: Administrative operations for TSDB management. + - name: features + description: Query enabled features. + - name: remote + description: Remote read and write endpoints. + - name: otlp + description: OpenTelemetry Protocol metrics ingestion. + - name: notifications + description: Server notifications and events. diff --git a/web/api/v1/testdata/openapi_3.2_golden.yaml b/web/api/v1/testdata/openapi_3.2_golden.yaml new file mode 100644 index 0000000000..f122408013 --- /dev/null +++ b/web/api/v1/testdata/openapi_3.2_golden.yaml @@ -0,0 +1,4452 @@ +openapi: 3.2.0 +info: + title: Prometheus API + description: Prometheus is an Open-Source monitoring system with a dimensional data model, flexible query language, efficient time series database and modern alerting approach. 
+ contact: + name: Prometheus Community + url: https://prometheus.io/community/ + version: 0.0.1-undefined +servers: + - url: /api/v1 +paths: + /query: + get: + tags: + - query + summary: Evaluate an instant query + operationId: query + parameters: + - name: limit + in: query + description: The maximum number of metrics to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + - name: time + in: query + description: The evaluation timestamp (optional, defaults to current time). + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: query + in: query + description: The PromQL query to execute. + required: true + explode: false + schema: + type: string + examples: + example: + value: up + - name: timeout + in: query + description: Evaluation timeout. Optional. Defaults to and is capped by the value of the -query.timeout flag. + required: false + explode: false + schema: + type: string + examples: + example: + value: 30s + - name: lookback_delta + in: query + description: Override the lookback period for this query. Optional. + required: false + explode: false + schema: + type: string + examples: + example: + value: 5m + - name: stats + in: query + description: When provided, include query statistics in the response. The special value 'all' enables more comprehensive statistics. + required: false + explode: false + schema: + type: string + examples: + example: + value: all + responses: + "200": + description: Query executed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryOutputBody' + examples: + vectorResult: + summary: 'Instant vector query: up' + value: {"status": "success", "data": {"resultType": "vector", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "value": [1767436620, "1"]}, {"metric": {"__name__": "up", "env": "demo", "instance": "demo.prometheus.io:9093", "job": "alertmanager"}, "value": [1767436620, "1"]}]}} + scalarResult: + summary: 'Scalar query: scalar(42)' + value: + data: + result: + - 1767436620 + - "42" + resultType: scalar + status: success + matrixResult: + summary: 'Range vector query: up[5m]' + value: {"status": "success", "data": {"resultType": "matrix", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "values": [[1767436320, "1"], [1767436620, "1"]]}]}} + default: + description: Error executing query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Evaluate an instant query + operationId: query-post + requestBody: + description: Submit an instant query. This endpoint accepts the same parameters as the GET version. 
+ content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/QueryPostInputBody' + examples: + simpleQuery: + summary: Simple instant query + value: + query: up + queryWithTime: + summary: Query with specific timestamp + value: + query: up{job="prometheus"} + time: "2026-01-02T13:37:00.000Z" + queryWithLimit: + summary: Query with limit and statistics + value: + limit: 100 + query: rate(prometheus_http_requests_total{handler="/api/v1/query"}[5m]) + stats: all + required: true + responses: + "200": + description: Instant query executed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryOutputBody' + examples: + vectorResult: + summary: 'Instant vector query: up' + value: {"status": "success", "data": {"resultType": "vector", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "value": [1767436620, "1"]}, {"metric": {"__name__": "up", "env": "demo", "instance": "demo.prometheus.io:9093", "job": "alertmanager"}, "value": [1767436620, "1"]}]}} + scalarResult: + summary: 'Scalar query: scalar(42)' + value: + data: + result: + - 1767436620 + - "42" + resultType: scalar + status: success + matrixResult: + summary: 'Range vector query: up[5m]' + value: {"status": "success", "data": {"resultType": "matrix", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "values": [[1767436320, "1"], [1767436620, "1"]]}]}} + default: + description: Error executing instant query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /query_range: + get: + tags: + - query + summary: Evaluate a range query + operationId: query-range + parameters: + - name: limit + in: query + description: The maximum number of metrics to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + - name: start + in: query + description: The start time of the query. + required: true + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: The end time of the query. + required: true + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: step + in: query + description: The step size of the query. + required: true + explode: false + schema: + type: string + examples: + example: + value: 15s + - name: query + in: query + description: The query to execute. + required: true + explode: false + schema: + type: string + examples: + example: + value: rate(prometheus_http_requests_total{handler="/api/v1/query"}[5m]) + - name: timeout + in: query + description: Evaluation timeout. Optional. Defaults to and is capped by the value of the -query.timeout flag. 
+ required: false + explode: false + schema: + type: string + examples: + example: + value: 30s + - name: lookback_delta + in: query + description: Override the lookback period for this query. Optional. + required: false + explode: false + schema: + type: string + examples: + example: + value: 5m + - name: stats + in: query + description: When provided, include query statistics in the response. The special value 'all' enables more comprehensive statistics. + required: false + explode: false + schema: + type: string + examples: + example: + value: all + responses: + "200": + description: Range query executed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRangeOutputBody' + examples: + matrixResult: + summary: 'Range query: rate(prometheus_http_requests_total[5m])' + value: {"status": "success", "data": {"resultType": "matrix", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "values": [[1767433020, "1"], [1767434820, "1"], [1767436620, "1"]]}]}} + default: + description: Error executing range query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Evaluate a range query + operationId: query-range-post + requestBody: + description: Submit a range query. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/QueryRangePostInputBody' + examples: + basicRange: + summary: Basic range query + value: + end: "2026-01-02T13:37:00.000Z" + query: up + start: "2026-01-02T12:37:00.000Z" + step: 15s + rateQuery: + summary: Rate calculation over time range + value: + end: "2026-01-02T13:37:00.000Z" + query: rate(prometheus_http_requests_total{handler="/api/v1/query"}[5m]) + start: "2026-01-02T12:37:00.000Z" + step: 30s + timeout: 30s + required: true + responses: + "200": + description: Range query executed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRangeOutputBody' + examples: + matrixResult: + summary: 'Range query: rate(prometheus_http_requests_total[5m])' + value: {"status": "success", "data": {"resultType": "matrix", "result": [{"metric": {"__name__": "up", "instance": "demo.prometheus.io:9090", "job": "prometheus"}, "values": [[1767433020, "1"], [1767434820, "1"], [1767436620, "1"]]}]}} + default: + description: Error executing range query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /query_exemplars: + get: + tags: + - query + summary: Query exemplars + operationId: query-exemplars + parameters: + - name: start + in: query + description: Start timestamp for exemplars query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for exemplars query. 
+ required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: query + in: query + description: PromQL query to extract exemplars for. + required: true + explode: false + schema: + type: string + examples: + example: + value: prometheus_http_requests_total + responses: + "200": + description: Exemplars retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryExemplarsOutputBody' + examples: + exemplarsResult: + summary: Exemplars for a metric with trace IDs + value: + data: + - exemplars: + - labels: + traceID: abc123def456 + timestamp: 1.689956451781e+09 + value: "1.5" + seriesLabels: + __name__: http_requests_total + job: api-server + method: GET + status: success + default: + description: Error retrieving exemplars. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Query exemplars + operationId: query-exemplars-post + requestBody: + description: Submit an exemplars query. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/QueryExemplarsPostInputBody' + examples: + basicExemplar: + summary: Query exemplars for a metric + value: + query: prometheus_http_requests_total + exemplarWithTimeRange: + summary: Exemplars within specific time range + value: + end: "2026-01-02T13:37:00.000Z" + query: prometheus_http_requests_total{job="prometheus"} + start: "2026-01-02T12:37:00.000Z" + required: true + responses: + "200": + description: Exemplars query completed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryExemplarsOutputBody' + examples: + exemplarsResult: + summary: Exemplars for a metric with trace IDs + value: + data: + - exemplars: + - labels: + traceID: abc123def456 + timestamp: 1.689956451781e+09 + value: "1.5" + seriesLabels: + __name__: http_requests_total + job: api-server + method: GET + status: success + default: + description: Error processing exemplars query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /format_query: + get: + tags: + - query + summary: Format a PromQL query + operationId: format-query + parameters: + - name: query + in: query + description: PromQL expression to format. + required: true + explode: false + schema: + type: string + examples: + example: + value: sum(rate(http_requests_total[5m])) by (job) + responses: + "200": + description: Query formatted successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/FormatQueryOutputBody' + examples: + formattedQuery: + summary: Formatted PromQL query + value: + data: sum by(job, status) (rate(http_requests_total[5m])) + status: success + default: + description: Error formatting query. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Format a PromQL query + operationId: format-query-post + requestBody: + description: Submit a PromQL query to format. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/FormatQueryPostInputBody' + examples: + simpleFormat: + summary: Format a simple query + value: + query: up{job="prometheus"} + complexFormat: + summary: Format a complex query + value: + query: sum(rate(http_requests_total[5m])) by (job, status) + required: true + responses: + "200": + description: Query formatting completed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/FormatQueryOutputBody' + examples: + formattedQuery: + summary: Formatted PromQL query + value: + data: sum by(job, status) (rate(http_requests_total[5m])) + status: success + default: + description: Error formatting query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /parse_query: + get: + tags: + - query + summary: Parse a PromQL query + operationId: parse-query + parameters: + - name: query + in: query + description: PromQL expression to parse. + required: true + explode: false + schema: + type: string + examples: + example: + value: up{job="prometheus"} + responses: + "200": + description: Query parsed successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/ParseQueryOutputBody' + examples: + parsedQuery: + summary: Parsed PromQL expression tree + value: + data: + resultType: vector + status: success + default: + description: Error parsing query. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - query + summary: Parse a PromQL query + operationId: parse-query-post + requestBody: + description: Submit a PromQL query to parse. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/ParseQueryPostInputBody' + examples: + simpleParse: + summary: Parse a simple query + value: + query: up + complexParse: + summary: Parse a complex query + value: + query: rate(http_requests_total{job="api"}[5m]) + required: true + responses: + "200": + description: Query parsed successfully via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/ParseQueryOutputBody' + examples: + parsedQuery: + summary: Parsed PromQL expression tree + value: + data: + resultType: vector + status: success + default: + description: Error parsing query via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /labels: + get: + tags: + - labels + summary: Get label names + operationId: labels + parameters: + - name: start + in: query + description: Start timestamp for label names query. 
+ required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for label names query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: match[] + in: query + description: Series selector argument. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{job="prometheus"}' + - name: limit + in: query + description: Maximum number of label names to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + responses: + "200": + description: Label names retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsOutputBody' + examples: + labelNames: + summary: List of label names + value: + data: + - __name__ + - active + - address + - alertmanager + - alertname + - alertstate + - backend + - branch + - code + - collector + - component + - device + - env + - endpoint + - fstype + - handler + - instance + - job + - le + - method + - mode + - name + status: success + default: + description: Error retrieving label names. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - labels + summary: Get label names + operationId: labels-post + requestBody: + description: Submit a label names query. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/LabelsPostInputBody' + examples: + allLabels: + summary: Get all label names + value: {} + labelsWithTimeRange: + summary: Get label names within time range + value: + end: "2026-01-02T13:37:00.000Z" + start: "2026-01-02T12:37:00.000Z" + labelsWithMatch: + summary: Get label names matching series selector + value: + match[]: + - up + - process_start_time_seconds{job="prometheus"} + required: true + responses: + "200": + description: Label names retrieved successfully via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/LabelsOutputBody' + examples: + labelNames: + summary: List of label names + value: + data: + - __name__ + - active + - address + - alertmanager + - alertname + - alertstate + - backend + - branch + - code + - collector + - component + - device + - env + - endpoint + - fstype + - handler + - instance + - job + - le + - method + - mode + - name + status: success + default: + description: Error retrieving label names via POST. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /label/{name}/values: + get: + tags: + - labels + summary: Get label values + operationId: label-values + parameters: + - name: name + in: path + description: Label name. + required: true + schema: + type: string + - name: start + in: query + description: Start timestamp for label values query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for label values query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: match[] + in: query + description: Series selector argument. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{job="prometheus"}' + - name: limit + in: query + description: Maximum number of label values to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 1000 + responses: + "200": + description: Label values retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/LabelValuesOutputBody' + examples: + labelValues: + summary: List of values for a label + value: + data: + - alertmanager + - blackbox + - caddy + - cadvisor + - grafana + - node + - prometheus + - random + status: success + default: + description: Error retrieving label values. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /series: + get: + tags: + - series + summary: Find series by label matchers + operationId: series + parameters: + - name: start + in: query + description: Start timestamp for series query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for series query. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + - name: match[] + in: query + description: Series selector argument. 
+ required: true + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{job="prometheus"}' + - name: limit + in: query + description: Maximum number of series to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + responses: + "200": + description: Series returned matching the provided label matchers. + content: + application/json: + schema: + $ref: '#/components/schemas/SeriesOutputBody' + examples: + seriesList: + summary: List of series matching the selector + value: + data: + - __name__: up + env: demo + instance: demo.prometheus.io:8080 + job: cadvisor + - __name__: up + env: demo + instance: demo.prometheus.io:9093 + job: alertmanager + - __name__: up + env: demo + instance: demo.prometheus.io:9100 + job: node + - __name__: up + instance: demo.prometheus.io:3000 + job: grafana + - __name__: up + instance: demo.prometheus.io:8996 + job: random + status: success + default: + description: Error retrieving series. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - series + summary: Find series by label matchers + operationId: series-post + requestBody: + description: Submit a series query. This endpoint accepts the same parameters as the GET version. + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/SeriesPostInputBody' + examples: + seriesMatch: + summary: Find series by label matchers + value: + match[]: + - up + seriesWithTimeRange: + summary: Find series with time range + value: + end: "2026-01-02T13:37:00.000Z" + match[]: + - up + - process_cpu_seconds_total{job="prometheus"} + start: "2026-01-02T12:37:00.000Z" + required: true + responses: + "200": + description: Series returned matching the provided label matchers via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/SeriesOutputBody' + examples: + seriesList: + summary: List of series matching the selector + value: + data: + - __name__: up + env: demo + instance: demo.prometheus.io:8080 + job: cadvisor + - __name__: up + env: demo + instance: demo.prometheus.io:9093 + job: alertmanager + - __name__: up + env: demo + instance: demo.prometheus.io:9100 + job: node + - __name__: up + instance: demo.prometheus.io:3000 + job: grafana + - __name__: up + instance: demo.prometheus.io:8996 + job: random + status: success + default: + description: Error retrieving series via POST. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + delete: + tags: + - series + summary: Delete series + description: 'Delete series matching selectors. Note: This is deprecated, use POST /admin/tsdb/delete_series instead.' + operationId: delete-series + responses: + "200": + description: Series marked for deletion. + content: + application/json: + schema: + $ref: '#/components/schemas/SeriesDeleteOutputBody' + examples: + seriesDeleted: + summary: Series marked for deletion + value: + status: success + default: + description: Error deleting series. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /metadata: + get: + tags: + - metadata + summary: Get metadata + operationId: get-metadata + parameters: + - name: limit + in: query + description: The maximum number of metrics to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + - name: limit_per_metric + in: query + description: The maximum number of metadata entries per metric. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 10 + - name: metric + in: query + description: A metric name to filter metadata for. + required: false + explode: false + schema: + type: string + examples: + example: + value: http_requests_total + responses: + "200": + description: Metric metadata retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/MetadataOutputBody' + examples: + metricMetadata: + summary: Metadata for metrics + value: + data: + go_gc_stack_starting_size_bytes: + - help: The stack size of new goroutines. Sourced from /gc/stack/starting-size:bytes. + type: gauge + unit: "" + prometheus_rule_group_iterations_missed_total: + - help: The total number of rule group evaluations missed due to slow rule group evaluation. + type: counter + unit: "" + prometheus_sd_updates_total: + - help: Total number of update events sent to the SD consumers. + type: counter + unit: "" + status: success + default: + description: Error retrieving metadata. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /scrape_pools: + get: + tags: + - targets + summary: Get scrape pools + operationId: get-scrape-pools + responses: + "200": + description: Scrape pools retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/ScrapePoolsOutputBody' + examples: + scrapePoolsList: + summary: List of scrape pool names + value: + data: + scrapePools: + - alertmanager + - blackbox + - caddy + - cadvisor + - grafana + - node + - prometheus + - random + status: success + default: + description: Error retrieving scrape pools. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /targets: + get: + tags: + - targets + summary: Get targets + operationId: get-targets + parameters: + - name: scrapePool + in: query + description: Filter targets by scrape pool name. + required: false + explode: false + schema: + type: string + examples: + example: + value: prometheus + - name: state + in: query + description: 'Filter by state: active, dropped, or any.' + required: false + explode: false + schema: + type: string + examples: + example: + value: active + responses: + "200": + description: Target discovery information retrieved successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/TargetsOutputBody' + examples: + targetsList: + summary: Active and dropped targets + value: + data: + activeTargets: + - discoveredLabels: + __address__: demo.prometheus.io:9093 + __meta_filepath: /etc/prometheus/file_sd/alertmanager.yml + __metrics_path__: /metrics + __scheme__: http + env: demo + job: alertmanager + globalUrl: http://demo.prometheus.io:9093/metrics + health: up + labels: + env: demo + instance: demo.prometheus.io:9093 + job: alertmanager + lastError: "" + lastScrape: "2026-01-02T13:36:40.200Z" + lastScrapeDuration: 0.006576866 + scrapeInterval: 15s + scrapePool: alertmanager + scrapeTimeout: 10s + scrapeUrl: http://demo.prometheus.io:9093/metrics + droppedTargetCounts: + alertmanager: 0 + blackbox: 0 + caddy: 0 + cadvisor: 0 + grafana: 0 + node: 0 + prometheus: 0 + random: 0 + droppedTargets: [] + status: success + default: + description: Error retrieving targets. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /targets/metadata: + get: + tags: + - targets + summary: Get targets metadata + operationId: get-targets-metadata + parameters: + - name: match_target + in: query + description: Label selector to filter targets. + required: false + explode: false + schema: + type: string + examples: + example: + value: '{job="prometheus"}' + - name: metric + in: query + description: Metric name to retrieve metadata for. + required: false + explode: false + schema: + type: string + examples: + example: + value: http_requests_total + - name: limit + in: query + description: Maximum number of targets to match. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 10 + responses: + "200": + description: Target metadata retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/TargetMetadataOutputBody' + examples: + targetMetadata: + summary: Metadata for targets + value: + data: + - help: The current health status of the target + metric: up + target: + instance: localhost:9090 + job: prometheus + type: gauge + unit: "" + status: success + default: + description: Error retrieving target metadata. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /targets/relabel_steps: + get: + tags: + - targets + summary: Get targets relabel steps + operationId: get-targets-relabel-steps + parameters: + - name: scrapePool + in: query + description: Name of the scrape pool. + required: true + explode: false + schema: + type: string + examples: + example: + value: prometheus + - name: labels + in: query + description: JSON-encoded labels to apply relabel rules to. + required: true + explode: false + schema: + type: string + examples: + example: + value: '{"__address__":"localhost:9090","job":"prometheus"}' + responses: + "200": + description: Relabel steps retrieved successfully. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/TargetRelabelStepsOutputBody' + examples: + relabelSteps: + summary: Relabel steps for a target + value: + data: + steps: + - keep: true + output: + __address__: localhost:9090 + instance: localhost:9090 + job: prometheus + rule: + action: replace + regex: (.*) + replacement: $1 + source_labels: + - __address__ + target_label: instance + status: success + default: + description: Error retrieving relabel steps. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /rules: + get: + tags: + - rules + summary: Get alerting and recording rules + operationId: rules + parameters: + - name: type + in: query + description: 'Filter by rule type: alert or record.' + required: false + explode: false + schema: + type: string + examples: + example: + value: alert + - name: rule_name[] + in: query + description: Filter by rule name. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - HighErrorRate + - name: rule_group[] + in: query + description: Filter by rule group name. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - example_alerts + - name: file[] + in: query + description: Filter by file path. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - /etc/prometheus/rules.yml + - name: match[] + in: query + description: Label matchers to filter rules. + required: false + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{severity="critical"}' + - name: exclude_alerts + in: query + description: Exclude active alerts from response. + required: false + explode: false + schema: + type: string + examples: + example: + value: "false" + - name: group_limit + in: query + description: Maximum number of rule groups to return. + required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 100 + - name: group_next_token + in: query + description: Pagination token for next page. + required: false + explode: false + schema: + type: string + examples: + example: + value: abc123 + responses: + "200": + description: Rules retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/RulesOutputBody' + examples: + ruleGroups: + summary: Alerting and recording rules + value: + data: + groups: + - evaluationTime: 0.000561635 + file: /etc/prometheus/rules/ansible_managed.yml + interval: 15 + lastEvaluation: "2026-01-02T13:36:56.874Z" + limit: 0 + name: ansible managed alert rules + rules: + - annotations: + description: This is an alert meant to ensure that the entire alerting pipeline is functional. This alert is always firing, therefore it should always be firing in Alertmanager and always fire against a receiver. There are integrations with various notification mechanisms that send a notification when this alert is not firing. For example the "DeadMansSnitch" integration in PagerDuty. 
+ summary: Ensure entire alerting pipeline is functional + duration: 600 + evaluationTime: 0.000356688 + health: ok + keepFiringFor: 0 + labels: + severity: warning + lastEvaluation: "2026-01-02T13:36:56.874Z" + name: Watchdog + query: vector(1) + state: firing + type: alerting + status: success + default: + description: Error retrieving rules. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /alerts: + get: + tags: + - alerts + summary: Get active alerts + operationId: alerts + responses: + "200": + description: Active alerts retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/AlertsOutputBody' + examples: + activeAlerts: + summary: Currently active alerts + value: + data: + alerts: + - activeAt: "2026-01-02T13:30:00.000Z" + annotations: + description: This is an alert meant to ensure that the entire alerting pipeline is functional. This alert is always firing, therefore it should always be firing in Alertmanager and always fire against a receiver. There are integrations with various notification mechanisms that send a notification when this alert is not firing. For example the "DeadMansSnitch" integration in PagerDuty. + summary: Ensure entire alerting pipeline is functional + labels: + alertname: Watchdog + severity: warning + state: firing + value: "1e+00" + status: success + default: + description: Error retrieving alerts. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /alertmanagers: + get: + tags: + - alerts + summary: Get Alertmanager discovery + operationId: alertmanagers + responses: + "200": + description: Alertmanager targets retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/AlertmanagersOutputBody' + examples: + alertmanagerDiscovery: + summary: Alertmanager discovery results + value: + data: + activeAlertmanagers: + - url: http://demo.prometheus.io:9093/api/v2/alerts + droppedAlertmanagers: [] + status: success + default: + description: Error retrieving Alertmanager targets. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/config: + get: + tags: + - status + summary: Get status config + operationId: get-status-config + responses: + "200": + description: Configuration retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusConfigOutputBody' + examples: + configYAML: + summary: Prometheus configuration + value: + data: + yaml: | + global: + scrape_interval: 15s + scrape_timeout: 10s + evaluation_interval: 15s + external_labels: + environment: demo-prometheus-io + alerting: + alertmanagers: + - scheme: http + static_configs: + - targets: + - demo.prometheus.io:9093 + rule_files: + - /etc/prometheus/rules/*.yml + status: success + default: + description: Error retrieving configuration. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/runtimeinfo: + get: + tags: + - status + summary: Get status runtimeinfo + operationId: get-status-runtimeinfo + responses: + "200": + description: Runtime information retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusRuntimeInfoOutputBody' + examples: + runtimeInfo: + summary: Runtime information + value: + data: + CWD: / + GODEBUG: "" + GOGC: "75" + GOMAXPROCS: 2 + GOMEMLIMIT: 3703818240 + corruptionCount: 0 + goroutineCount: 88 + hostname: demo-prometheus-io + lastConfigTime: "2026-01-01T13:37:00.000Z" + reloadConfigSuccess: true + serverTime: "2026-01-02T13:37:00.000Z" + startTime: "2026-01-01T13:37:00.000Z" + storageRetention: 31d + status: success + default: + description: Error retrieving runtime information. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/buildinfo: + get: + tags: + - status + summary: Get status buildinfo + operationId: get-status-buildinfo + responses: + "200": + description: Build information retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusBuildInfoOutputBody' + examples: + buildInfo: + summary: Build information + value: + data: + branch: HEAD + buildDate: 20251030-07:26:10 + buildUser: root@08c890a84441 + goVersion: go1.25.3 + revision: 0a41f0000705c69ab8e0f9a723fc73e39ed62b07 + version: 3.7.3 + status: success + default: + description: Error retrieving build information. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/flags: + get: + tags: + - status + summary: Get status flags + operationId: get-status-flags + responses: + "200": + description: Command-line flags retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusFlagsOutputBody' + examples: + flags: + summary: Command-line flags + value: + data: + agent: "false" + alertmanager.notification-queue-capacity: "10000" + config.file: /etc/prometheus/prometheus.yml + enable-feature: exemplar-storage,native-histograms + query.max-concurrency: "20" + query.timeout: 2m + storage.tsdb.path: /prometheus + storage.tsdb.retention.time: 15d + web.console.libraries: /usr/share/prometheus/console_libraries + web.console.templates: /usr/share/prometheus/consoles + web.enable-admin-api: "true" + web.enable-lifecycle: "true" + web.listen-address: 0.0.0.0:9090 + web.page-title: Prometheus Time Series Collection and Processing Server + status: success + default: + description: Error retrieving flags. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/tsdb: + get: + tags: + - status + summary: Get TSDB status + operationId: status-tsdb + parameters: + - name: limit + in: query + description: The maximum number of items to return per category. 
+ required: false + explode: false + schema: + type: integer + format: int64 + examples: + example: + value: 10 + responses: + "200": + description: TSDB status retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusTSDBOutputBody' + examples: + tsdbStats: + summary: TSDB statistics + value: + data: + headStats: + chunkCount: 37525 + maxTime: 1767436620000 + minTime: 1767362400712 + numLabelPairs: 2512 + numSeries: 9925 + labelValueCountByLabelName: + - name: __name__ + value: 5 + - name: job + value: 3 + memoryInBytesByLabelName: + - name: __name__ + value: 1024 + - name: job + value: 512 + seriesCountByLabelValuePair: + - name: job=prometheus + value: 100 + - name: instance=localhost:9090 + value: 100 + seriesCountByMetricName: + - name: up + value: 100 + - name: http_requests_total + value: 500 + status: success + default: + description: Error retrieving TSDB status. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/tsdb/blocks: + get: + tags: + - status + summary: Get TSDB blocks information + operationId: status-tsdb-blocks + responses: + "200": + description: TSDB blocks information retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusTSDBBlocksOutputBody' + examples: + tsdbBlocks: + summary: TSDB block information + value: + data: + blocks: + - compaction: + level: 4 + sources: + - 01KBCJ7TR8A4QAJ3AA1J651P5S + - 01KBCS3J0E34567YPB8Y5W0E24 + - 01KBCZZ9KRTYGG3E7HVQFGC3S3 + maxTime: 1764763200000 + minTime: 1764568801099 + stats: + numChunks: 1073962 + numSamples: 129505582 + numSeries: 10661 + ulid: 01KC4D6GXQA4CRHYKV78NEBVAE + version: 1 + status: success + default: + description: Error retrieving TSDB blocks. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /status/walreplay: + get: + tags: + - status + summary: Get status walreplay + operationId: get-status-walreplay + responses: + "200": + description: WAL replay status retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/StatusWALReplayOutputBody' + examples: + walReplay: + summary: WAL replay status + value: + data: + current: 3214 + max: 3214 + min: 3209 + status: success + default: + description: Error retrieving WAL replay status. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /admin/tsdb/delete_series: + put: + tags: + - admin + summary: Delete series matching selectors via PUT + description: Deletes data for a selection of series in a time range using PUT method. + operationId: deleteSeriesPut + parameters: + - name: match[] + in: query + description: Series selectors to identify series to delete. + required: true + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{__name__=~"test.*"}' + - name: start + in: query + description: Start timestamp for deletion. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. 
+ description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for deletion. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + responses: + "200": + description: Series deleted successfully via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/DeleteSeriesOutputBody' + examples: + deletionSuccess: + summary: Successful series deletion + value: + status: success + default: + description: Error deleting series via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - admin + summary: Delete series matching selectors + description: Deletes data for a selection of series in a time range. + operationId: deleteSeriesPost + parameters: + - name: match[] + in: query + description: Series selectors to identify series to delete. + required: true + explode: false + schema: + type: array + items: + type: string + examples: + example: + value: + - '{__name__=~"test.*"}' + - name: start + in: query + description: Start timestamp for deletion. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T12:37:00Z" + epoch: + value: 1767357420 + - name: end + in: query + description: End timestamp for deletion. + required: false + explode: false + schema: + oneOf: + - type: string + format: date-time + description: RFC3339 timestamp. + - type: number + format: unixtime + description: Unix timestamp in seconds. + description: Timestamp in RFC3339 format or Unix timestamp in seconds. + examples: + RFC3339: + value: "2026-01-02T13:37:00Z" + epoch: + value: 1767361020 + responses: + "200": + description: Series deleted successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/DeleteSeriesOutputBody' + examples: + deletionSuccess: + summary: Successful series deletion + value: + status: success + default: + description: Error deleting series. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /admin/tsdb/clean_tombstones: + put: + tags: + - admin + summary: Clean tombstones in the TSDB via PUT + description: Removes deleted data from disk and cleans up existing tombstones using PUT method. + operationId: cleanTombstonesPut + responses: + "200": + description: Tombstones cleaned successfully via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/CleanTombstonesOutputBody' + examples: + tombstonesCleaned: + summary: Tombstones cleaned successfully + value: + status: success + default: + description: Error cleaning tombstones via PUT. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - admin + summary: Clean tombstones in the TSDB + description: Removes deleted data from disk and cleans up existing tombstones. + operationId: cleanTombstonesPost + responses: + "200": + description: Tombstones cleaned successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/CleanTombstonesOutputBody' + examples: + tombstonesCleaned: + summary: Tombstones cleaned successfully + value: + status: success + default: + description: Error cleaning tombstones. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /admin/tsdb/snapshot: + put: + tags: + - admin + summary: Create a snapshot of the TSDB via PUT + description: Creates a snapshot of all current data using PUT method. + operationId: snapshotPut + parameters: + - name: skip_head + in: query + description: If true, do not snapshot data in the head block. + required: false + explode: false + schema: + type: string + examples: + example: + value: "false" + responses: + "200": + description: Snapshot created successfully via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/SnapshotOutputBody' + examples: + snapshotCreated: + summary: Snapshot created successfully + value: + data: + name: 20260102T133700Z-a1b2c3d4e5f67890 + status: success + default: + description: Error creating snapshot via PUT. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + post: + tags: + - admin + summary: Create a snapshot of the TSDB + description: Creates a snapshot of all current data. + operationId: snapshotPost + parameters: + - name: skip_head + in: query + description: If true, do not snapshot data in the head block. + required: false + explode: false + schema: + type: string + examples: + example: + value: "false" + responses: + "200": + description: Snapshot created successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/SnapshotOutputBody' + examples: + snapshotCreated: + summary: Snapshot created successfully + value: + data: + name: 20260102T133700Z-a1b2c3d4e5f67890 + status: success + default: + description: Error creating snapshot. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /read: + post: + tags: + - remote + summary: Remote read endpoint + description: Prometheus remote read endpoint for federated queries. Accepts and returns Protocol Buffer encoded data. + operationId: remoteRead + responses: + "204": + description: No Content + default: + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /write: + post: + tags: + - remote + summary: Remote write endpoint + description: Prometheus remote write endpoint for sending metrics. Accepts Protocol Buffer encoded write requests. 
+ operationId: remoteWrite + responses: + "204": + description: No Content + default: + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /otlp/v1/metrics: + post: + tags: + - otlp + summary: OTLP metrics write endpoint + description: OpenTelemetry Protocol metrics ingestion endpoint. Accepts OTLP/HTTP metrics in Protocol Buffer format. + operationId: otlpWrite + responses: + "204": + description: No Content + default: + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /notifications: + get: + tags: + - notifications + summary: Get notifications + operationId: get-notifications + responses: + "200": + description: Notifications retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/NotificationsOutputBody' + examples: + notifications: + summary: Server notifications + value: + data: + - active: true + date: "2026-01-02T16:14:50.046Z" + text: Configuration reload has failed. + status: success + default: + description: Error retrieving notifications. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error + /notifications/live: + get: + tags: + - notifications + summary: Stream live notifications via Server-Sent Events + description: Subscribe to real-time server notifications using SSE. Each event contains a JSON-encoded Notification object in the data field. + operationId: notifications-live + responses: + "200": + description: Server-sent events stream established. + content: + text/event-stream: + itemSchema: + type: object + properties: + data: + type: string + contentSchema: + $ref: '#/components/schemas/Notification' + description: SSE data field containing JSON-encoded notification. + contentMediaType: application/json + title: Server Sent Event Message + required: + - data + additionalProperties: false + description: A single SSE message. The data field contains a JSON-encoded Notification object. + examples: + activeNotification: + summary: Active notification SSE message + description: An SSE message containing an active server notification. + value: + data: '{"text":"Configuration reload has failed.","date":"2026-01-02T16:14:50.046Z","active":true}' + default: + description: Error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /features: + get: + tags: + - features + summary: Get features + operationId: get-features + responses: + "200": + description: Feature flags retrieved successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/FeaturesOutputBody' + examples: + enabledFeatures: + summary: Enabled feature flags + value: + data: + - exemplar-storage + - remote-write-receiver + status: success + default: + description: Error retrieving features. + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + examples: + tsdbNotReady: + summary: TSDB not ready + value: + error: TSDB not ready + errorType: internal + status: error +components: + schemas: + Error: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + errorType: + type: string + description: Type of error that occurred. + example: bad_data + error: + type: string + description: Human-readable error message. 
+ example: invalid parameter + required: + - status + - errorType + - error + additionalProperties: false + description: Error response. + Labels: + type: object + additionalProperties: true + description: Label set represented as a key-value map. + QueryOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/QueryData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for instant query. + QueryRangeOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/QueryData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for range query. + QueryPostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The PromQL query to execute.' + example: up + time: + type: string + description: 'Form field: The evaluation timestamp (optional, defaults to current time).' + example: "2023-07-21T20:10:51.781Z" + limit: + type: integer + format: int64 + description: 'Form field: The maximum number of metrics to return.' + example: 100 + timeout: + type: string + description: 'Form field: Evaluation timeout (optional, defaults to and is capped by the value of the -query.timeout flag).' + example: 30s + lookback_delta: + type: string + description: 'Form field: Override the lookback period for this query (optional).' + example: 5m + stats: + type: string + description: 'Form field: When provided, include query statistics in the response (the special value ''all'' enables more comprehensive statistics).' + example: all + required: + - query + additionalProperties: false + description: POST request body for instant query. + QueryRangePostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The query to execute.' + example: rate(http_requests_total[5m]) + start: + type: string + description: 'Form field: The start time of the query.' + example: "2023-07-21T20:10:30.781Z" + end: + type: string + description: 'Form field: The end time of the query.' + example: "2023-07-21T20:20:30.781Z" + step: + type: string + description: 'Form field: The step size of the query.' + example: 15s + limit: + type: integer + format: int64 + description: 'Form field: The maximum number of metrics to return.' + example: 100 + timeout: + type: string + description: 'Form field: Evaluation timeout (optional, defaults to and is capped by the value of the -query.timeout flag).' + example: 30s + lookback_delta: + type: string + description: 'Form field: Override the lookback period for this query (optional).' 
+ example: 5m + stats: + type: string + description: 'Form field: When provided, include query statistics in the response (the special value ''all'' enables more comprehensive statistics).' + example: all + required: + - query + - start + - end + - step + additionalProperties: false + description: POST request body for range query. + QueryExemplarsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + description: Response data (structure varies by endpoint). + example: + result: ok + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Generic response body. + QueryExemplarsPostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The query to execute.' + example: http_requests_total + start: + type: string + description: 'Form field: The start time of the query.' + example: "2023-07-21T20:00:00.000Z" + end: + type: string + description: 'Form field: The end time of the query.' + example: "2023-07-21T21:00:00.000Z" + required: + - query + additionalProperties: false + description: POST request body for exemplars query. + FormatQueryOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: string + description: Formatted query string. + example: sum by(status) (rate(http_requests_total[5m])) + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for format query endpoint. + FormatQueryPostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The query to format.' + example: sum(rate(http_requests_total[5m])) by (status) + required: + - query + additionalProperties: false + description: POST request body for format query. + ParseQueryOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + description: Response data (structure varies by endpoint). + example: + result: ok + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Generic response body. + ParseQueryPostInputBody: + type: object + properties: + query: + type: string + description: 'Form field: The query to parse.' + example: sum(rate(http_requests_total[5m])) + required: + - query + additionalProperties: false + description: POST request body for parse query. 
+ QueryData: + anyOf: + - type: object + properties: + resultType: + type: string + enum: + - vector + result: + type: array + items: + anyOf: + - $ref: '#/components/schemas/FloatSample' + - $ref: '#/components/schemas/HistogramSample' + description: Array of samples (either float or histogram). + required: + - resultType + - result + additionalProperties: false + - type: object + properties: + resultType: + type: string + enum: + - matrix + result: + type: array + items: + anyOf: + - $ref: '#/components/schemas/FloatSeries' + - $ref: '#/components/schemas/HistogramSeries' + description: Array of time series (either float or histogram). + required: + - resultType + - result + additionalProperties: false + - type: object + properties: + resultType: + type: string + enum: + - scalar + result: + type: array + items: + oneOf: + - type: number + - type: string + maxItems: 2 + minItems: 2 + description: Scalar value as [timestamp, stringValue]. + required: + - resultType + - result + additionalProperties: false + - type: object + properties: + resultType: + type: string + enum: + - string + result: + type: array + items: + type: string + maxItems: 2 + minItems: 2 + description: String value as [timestamp, stringValue]. + required: + - resultType + - result + additionalProperties: false + description: Query result data. The structure of 'result' depends on 'resultType'. + example: + result: + - metric: + __name__: up + job: prometheus + value: + - 1627845600 + - "1" + resultType: vector + FloatSample: + type: object + properties: + metric: + $ref: '#/components/schemas/Labels' + value: + type: array + items: + oneOf: + - type: number + - type: string + maxItems: 2 + minItems: 2 + description: Timestamp and float value as [unixTimestamp, stringValue]. + example: + - 1767436620 + - "1" + required: + - metric + - value + additionalProperties: false + description: A sample with a float value. + HistogramSample: + type: object + properties: + metric: + $ref: '#/components/schemas/Labels' + histogram: + type: array + items: + oneOf: + - type: number + - $ref: '#/components/schemas/HistogramValue' + maxItems: 2 + minItems: 2 + description: Timestamp and histogram value as [unixTimestamp, histogramObject]. + example: + - 1767436620 + - buckets: [] + count: "60" + sum: "120" + required: + - metric + - histogram + additionalProperties: false + description: A sample with a native histogram value. + FloatSeries: + type: object + properties: + metric: + $ref: '#/components/schemas/Labels' + values: + type: array + items: + type: array + items: + oneOf: + - type: number + - type: string + maxItems: 2 + minItems: 2 + description: Array of [timestamp, stringValue] pairs for float values. + required: + - metric + - values + additionalProperties: false + description: A time series with float values. + HistogramSeries: + type: object + properties: + metric: + $ref: '#/components/schemas/Labels' + histograms: + type: array + items: + type: array + items: + oneOf: + - type: number + - $ref: '#/components/schemas/HistogramValue' + maxItems: 2 + minItems: 2 + description: Array of [timestamp, histogramObject] pairs for histogram values. + required: + - metric + - histograms + additionalProperties: false + description: A time series with native histogram values. + HistogramValue: + type: object + properties: + count: + type: string + description: Total count of observations. + sum: + type: string + description: Sum of all observed values. 
+ buckets: + type: array + items: + type: array + items: + oneOf: + - type: number + - type: string + description: Histogram buckets as [boundary_rule, lower, upper, count]. + required: + - count + - sum + additionalProperties: false + description: Native histogram value representation. + LabelsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + type: string + example: + - __name__ + - job + - instance + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of strings. + LabelsPostInputBody: + type: object + properties: + start: + type: string + description: 'Form field: The start time of the query.' + example: "2023-07-21T20:00:00.000Z" + end: + type: string + description: 'Form field: The end time of the query.' + example: "2023-07-21T21:00:00.000Z" + match[]: + type: array + items: + type: string + description: 'Form field: Series selector argument that selects the series from which to read the label names.' + example: + - '{job="prometheus"}' + limit: + type: integer + format: int64 + description: 'Form field: The maximum number of label names to return.' + example: 100 + additionalProperties: false + description: POST request body for labels query. + LabelValuesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + type: string + example: + - __name__ + - job + - instance + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of strings. + SeriesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + $ref: '#/components/schemas/Labels' + example: + - __name__: up + instance: localhost:9090 + job: prometheus + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of label sets. + SeriesPostInputBody: + type: object + properties: + start: + type: string + description: 'Form field: The start time of the query.' + example: "2023-07-21T20:00:00.000Z" + end: + type: string + description: 'Form field: The end time of the query.' + example: "2023-07-21T21:00:00.000Z" + match[]: + type: array + items: + type: string + description: 'Form field: Series selector argument that selects the series to return.' 
+ example: + - '{job="prometheus"}' + limit: + type: integer + format: int64 + description: 'Form field: The maximum number of series to return.' + example: 100 + required: + - match[] + additionalProperties: false + description: POST request body for series query. + SeriesDeleteOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + description: Response data (structure varies by endpoint). + example: + result: ok + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Generic response body. + Metadata: + type: object + properties: + type: + type: string + description: Metric type (counter, gauge, histogram, summary, or untyped). + unit: + type: string + description: Unit of the metric. + help: + type: string + description: Help text describing the metric. + required: + - type + - unit + - help + additionalProperties: false + description: Metric metadata. + MetadataOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: object + additionalProperties: + type: array + items: + $ref: '#/components/schemas/Metadata' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for metadata endpoint. + MetricMetadata: + type: object + properties: + target: + $ref: '#/components/schemas/Labels' + metric: + type: string + description: Metric name. + type: + type: string + description: Metric type (counter, gauge, histogram, summary, or untyped). + help: + type: string + description: Help text describing the metric. + unit: + type: string + description: Unit of the metric. + required: + - target + - type + - help + - unit + additionalProperties: false + description: Target metric metadata. + Target: + type: object + properties: + discoveredLabels: + $ref: '#/components/schemas/Labels' + labels: + $ref: '#/components/schemas/Labels' + scrapePool: + type: string + description: Name of the scrape pool. + scrapeUrl: + type: string + description: URL of the target. + globalUrl: + type: string + description: Global URL of the target. + lastError: + type: string + description: Last error message from scraping. + lastScrape: + type: string + format: date-time + description: Timestamp of the last scrape. + lastScrapeDuration: + type: number + format: double + description: Duration of the last scrape in seconds. + health: + type: string + description: Health status of the target (up, down, or unknown). + scrapeInterval: + type: string + description: Scrape interval for this target. + scrapeTimeout: + type: string + description: Scrape timeout for this target. 
+ required: + - discoveredLabels + - labels + - scrapePool + - scrapeUrl + - globalUrl + - lastError + - lastScrape + - lastScrapeDuration + - health + - scrapeInterval + - scrapeTimeout + additionalProperties: false + description: Scrape target information. + DroppedTarget: + type: object + properties: + discoveredLabels: + $ref: '#/components/schemas/Labels' + scrapePool: + type: string + description: Name of the scrape pool. + required: + - discoveredLabels + - scrapePool + additionalProperties: false + description: Dropped target information. + TargetDiscovery: + type: object + properties: + activeTargets: + type: array + items: + $ref: '#/components/schemas/Target' + droppedTargets: + type: array + items: + $ref: '#/components/schemas/DroppedTarget' + droppedTargetCounts: + type: object + additionalProperties: + type: integer + format: int64 + required: + - activeTargets + - droppedTargets + - droppedTargetCounts + additionalProperties: false + description: Target discovery information including active and dropped targets. + TargetsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/TargetDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for targets endpoint. + TargetMetadataOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + $ref: '#/components/schemas/MetricMetadata' + example: + - help: The current health status of the target + metric: up + target: + instance: localhost:9090 + job: prometheus + type: gauge + unit: "" + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of metric metadata. + ScrapePoolsDiscovery: + type: object + properties: + scrapePools: + type: array + items: + type: string + required: + - scrapePools + additionalProperties: false + description: List of all configured scrape pools. + ScrapePoolsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/ScrapePoolsDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for scrape pools endpoint. + Config: + type: object + properties: + source_labels: + type: array + items: + type: string + description: Source labels for relabeling. 
+ separator: + type: string + description: Separator for source label values. + regex: + type: string + description: Regular expression for matching. + modulus: + type: integer + format: int64 + description: Modulus for hash-based relabeling. + target_label: + type: string + description: Target label name. + replacement: + type: string + description: Replacement value. + action: + type: string + description: Relabel action. + additionalProperties: false + description: Relabel configuration. + RelabelStep: + type: object + properties: + rule: + $ref: '#/components/schemas/Config' + output: + $ref: '#/components/schemas/Labels' + keep: + type: boolean + required: + - rule + - output + - keep + additionalProperties: false + description: Relabel step showing the rule, output, and whether the target was kept. + RelabelStepsResponse: + type: object + properties: + steps: + type: array + items: + $ref: '#/components/schemas/RelabelStep' + required: + - steps + additionalProperties: false + description: Relabeling steps response. + TargetRelabelStepsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/RelabelStepsResponse' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for target relabel steps endpoint. + RuleGroup: + type: object + properties: + name: + type: string + description: Name of the rule group. + file: + type: string + description: File containing the rule group. + rules: + type: array + items: + type: object + description: Rule definition. + description: Rules in this group. + interval: + type: number + format: double + description: Evaluation interval in seconds. + limit: + type: integer + format: int64 + description: Maximum number of alerts for this group. + evaluationTime: + type: number + format: double + description: Time taken to evaluate the group in seconds. + lastEvaluation: + type: string + format: date-time + description: Timestamp of the last evaluation. + required: + - name + - file + - rules + - interval + - limit + - evaluationTime + - lastEvaluation + additionalProperties: false + description: Rule group information. + RuleDiscovery: + type: object + properties: + groups: + type: array + items: + $ref: '#/components/schemas/RuleGroup' + groupNextToken: + type: string + description: Pagination token for the next page of groups. + required: + - groups + additionalProperties: false + description: Rule discovery information containing all rule groups. + RulesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/RuleDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for rules endpoint. 
+ Alert: + type: object + properties: + labels: + $ref: '#/components/schemas/Labels' + annotations: + $ref: '#/components/schemas/Labels' + state: + type: string + description: State of the alert (pending, firing, or inactive). + value: + type: string + description: Value of the alert expression. + activeAt: + type: string + format: date-time + description: Timestamp when the alert became active. + keepFiringSince: + type: string + format: date-time + description: Timestamp since the alert has been kept firing. + required: + - labels + - annotations + - state + - value + additionalProperties: false + description: Alert information. + AlertDiscovery: + type: object + properties: + alerts: + type: array + items: + $ref: '#/components/schemas/Alert' + required: + - alerts + additionalProperties: false + description: Alert discovery information containing all active alerts. + AlertsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/AlertDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for alerts endpoint. + AlertmanagerTarget: + type: object + properties: + url: + type: string + description: URL of the Alertmanager instance. + required: + - url + additionalProperties: false + description: Alertmanager target information. + AlertmanagerDiscovery: + type: object + properties: + activeAlertmanagers: + type: array + items: + $ref: '#/components/schemas/AlertmanagerTarget' + droppedAlertmanagers: + type: array + items: + $ref: '#/components/schemas/AlertmanagerTarget' + required: + - activeAlertmanagers + - droppedAlertmanagers + additionalProperties: false + description: Alertmanager discovery information including active and dropped instances. + AlertmanagersOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/AlertmanagerDiscovery' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for alertmanagers endpoint. + StatusConfigData: + type: object + properties: + yaml: + type: string + description: Prometheus configuration in YAML format. + required: + - yaml + additionalProperties: false + description: Prometheus configuration. + StatusConfigOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/StatusConfigData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. 
+ required: + - status + - data + additionalProperties: false + description: Response body for status config endpoint. + RuntimeInfo: + type: object + properties: + startTime: + type: string + format: date-time + CWD: + type: string + hostname: + type: string + serverTime: + type: string + format: date-time + reloadConfigSuccess: + type: boolean + lastConfigTime: + type: string + format: date-time + corruptionCount: + type: integer + format: int64 + goroutineCount: + type: integer + format: int64 + GOMAXPROCS: + type: integer + format: int64 + GOMEMLIMIT: + type: integer + format: int64 + GOGC: + type: string + GODEBUG: + type: string + storageRetention: + type: string + required: + - startTime + - CWD + - hostname + - serverTime + - reloadConfigSuccess + - lastConfigTime + - corruptionCount + - goroutineCount + - GOMAXPROCS + - GOMEMLIMIT + - GOGC + - GODEBUG + - storageRetention + additionalProperties: false + description: Prometheus runtime information. + StatusRuntimeInfoOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/RuntimeInfo' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status runtime info endpoint. + PrometheusVersion: + type: object + properties: + version: + type: string + revision: + type: string + branch: + type: string + buildUser: + type: string + buildDate: + type: string + goVersion: + type: string + required: + - version + - revision + - branch + - buildUser + - buildDate + - goVersion + additionalProperties: false + description: Prometheus version information. + StatusBuildInfoOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/PrometheusVersion' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status build info endpoint. + StatusFlagsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: object + additionalProperties: + type: string + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status flags endpoint. 
+ HeadStats: + type: object + properties: + numSeries: + type: integer + format: int64 + numLabelPairs: + type: integer + format: int64 + chunkCount: + type: integer + format: int64 + minTime: + type: integer + format: int64 + maxTime: + type: integer + format: int64 + required: + - numSeries + - numLabelPairs + - chunkCount + - minTime + - maxTime + additionalProperties: false + description: TSDB head statistics. + TSDBStat: + type: object + properties: + name: + type: string + value: + type: integer + format: int64 + required: + - name + - value + additionalProperties: false + description: TSDB statistic. + TSDBStatus: + type: object + properties: + headStats: + $ref: '#/components/schemas/HeadStats' + seriesCountByMetricName: + type: array + items: + $ref: '#/components/schemas/TSDBStat' + labelValueCountByLabelName: + type: array + items: + $ref: '#/components/schemas/TSDBStat' + memoryInBytesByLabelName: + type: array + items: + $ref: '#/components/schemas/TSDBStat' + seriesCountByLabelValuePair: + type: array + items: + $ref: '#/components/schemas/TSDBStat' + required: + - headStats + - seriesCountByMetricName + - labelValueCountByLabelName + - memoryInBytesByLabelName + - seriesCountByLabelValuePair + additionalProperties: false + description: TSDB status information. + StatusTSDBOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/TSDBStatus' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status TSDB endpoint. + BlockDesc: + type: object + properties: + ulid: + type: string + minTime: + type: integer + format: int64 + maxTime: + type: integer + format: int64 + required: + - ulid + - minTime + - maxTime + additionalProperties: false + description: Block descriptor. + BlockStats: + type: object + properties: + numSamples: + type: integer + format: int64 + numSeries: + type: integer + format: int64 + numChunks: + type: integer + format: int64 + numTombstones: + type: integer + format: int64 + numFloatSamples: + type: integer + format: int64 + numHistogramSamples: + type: integer + format: int64 + additionalProperties: false + description: Block statistics. + BlockMetaCompaction: + type: object + properties: + level: + type: integer + format: int64 + sources: + type: array + items: + type: string + parents: + type: array + items: + $ref: '#/components/schemas/BlockDesc' + failed: + type: boolean + deletable: + type: boolean + hints: + type: array + items: + type: string + required: + - level + additionalProperties: false + description: Block compaction metadata. + BlockMeta: + type: object + properties: + ulid: + type: string + minTime: + type: integer + format: int64 + maxTime: + type: integer + format: int64 + stats: + $ref: '#/components/schemas/BlockStats' + compaction: + $ref: '#/components/schemas/BlockMetaCompaction' + version: + type: integer + format: int64 + required: + - ulid + - minTime + - maxTime + - compaction + - version + additionalProperties: false + description: Block metadata. 
+ StatusTSDBBlocksData: + type: object + properties: + blocks: + type: array + items: + $ref: '#/components/schemas/BlockMeta' + required: + - blocks + additionalProperties: false + description: TSDB blocks information. + StatusTSDBBlocksOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/StatusTSDBBlocksData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status TSDB blocks endpoint. + StatusWALReplayData: + type: object + properties: + min: + type: integer + format: int64 + max: + type: integer + format: int64 + current: + type: integer + format: int64 + required: + - min + - max + - current + additionalProperties: false + description: WAL replay status. + StatusWALReplayOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/StatusWALReplayData' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for status WAL replay endpoint. + DeleteSeriesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + additionalProperties: false + description: Response body containing only status. + CleanTombstonesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + additionalProperties: false + description: Response body containing only status. + DataStruct: + type: object + properties: + name: + type: string + required: + - name + additionalProperties: false + description: Generic data structure with a name field. + SnapshotOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + $ref: '#/components/schemas/DataStruct' + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. 
+ infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body for snapshot endpoint. + Notification: + type: object + properties: + text: + type: string + date: + type: string + format: date-time + active: + type: boolean + required: + - text + - date + - active + additionalProperties: false + description: Server notification. + NotificationsOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + type: array + items: + $ref: '#/components/schemas/Notification' + example: + - active: true + date: "2023-07-21T20:00:00.000Z" + text: Server is running + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Response body with an array of notifications. + FeaturesOutputBody: + type: object + properties: + status: + type: string + enum: + - success + - error + description: Response status. + example: success + data: + description: Response data (structure varies by endpoint). + example: + result: ok + warnings: + type: array + items: + type: string + description: Only set if there were warnings while executing the request. There will still be data in the data field. + infos: + type: array + items: + type: string + description: Only set if there were info-level annotations while executing the request. + required: + - status + - data + additionalProperties: false + description: Generic response body. +tags: + - name: query + summary: Query + description: Query and evaluate PromQL expressions. + - name: metadata + summary: Metadata + description: Retrieve metric metadata such as type and unit. + - name: labels + summary: Labels + description: Query label names and values. + - name: series + summary: Series + description: Query and manage time series. + - name: targets + summary: Targets + description: Retrieve target and scrape pool information. + - name: rules + summary: Rules + description: Query recording and alerting rules. + - name: alerts + summary: Alerts + description: Query active alerts and alertmanager discovery. + - name: status + summary: Status + description: Retrieve server status and configuration. + - name: admin + summary: Admin + description: Administrative operations for TSDB management. + - name: features + summary: Features + description: Query enabled features. + - name: remote + summary: Remote Storage + description: Remote read and write endpoints. + - name: otlp + summary: OTLP + description: OpenTelemetry Protocol metrics ingestion. + - name: notifications + summary: Notifications + description: Server notifications and events. 
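For reviewers who want to poke at the generated document rather than read the golden file, here is a minimal sketch that fetches the served spec from a running server and prints the declared version together with the tag names listed above. The localhost address, the exact serving path, and the use of gopkg.in/yaml.v3 for parsing are assumptions made purely for illustration; none of them is a dependency or guarantee introduced by this change.

// sketch: fetch the generated OpenAPI document and list its tags.
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"

	"gopkg.in/yaml.v3" // assumed available for this illustration only
)

func main() {
	// Assumed address and path of a locally running server built with this patch.
	resp, err := http.Get("http://localhost:9090/api/v1/openapi.yaml")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}

	// Only the fields inspected here are declared; everything else is ignored.
	var spec struct {
		OpenAPI string `yaml:"openapi"`
		Tags    []struct {
			Name        string `yaml:"name"`
			Description string `yaml:"description"`
		} `yaml:"tags"`
	}
	if err := yaml.Unmarshal(body, &spec); err != nil {
		log.Fatal(err)
	}

	fmt.Println("openapi:", spec.OpenAPI)
	for _, t := range spec.Tags {
		fmt.Printf("  %s: %s\n", t.Name, t.Description)
	}
}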
diff --git a/web/ui/mantine-ui/src/promql/tools/go.mod b/web/ui/mantine-ui/src/promql/tools/go.mod index a3abc881e2..d3f69a698b 100644 --- a/web/ui/mantine-ui/src/promql/tools/go.mod +++ b/web/ui/mantine-ui/src/promql/tools/go.mod @@ -1,6 +1,6 @@ module github.com/prometheus/prometheus/web/ui/mantine-ui/src/promql/tools -go 1.24.0 +go 1.25.0 require ( github.com/grafana/regexp v0.0.0-20250905093917-f7b3be9d1853 diff --git a/web/web.go b/web/web.go index 4df447be64..aec2f2d4ee 100644 --- a/web/web.go +++ b/web/web.go @@ -361,6 +361,11 @@ func New(logger *slog.Logger, o *Options) *Handler { app = h.storage } + version := "" + if o.Version != nil { + version = o.Version.Version + } + h.apiV1 = api_v1.NewAPI(h.queryEngine, h.storage, app, h.exemplarStorage, factorySPr, factoryTr, factoryAr, func() config.Config { h.mtx.RLock() @@ -402,6 +407,10 @@ func New(logger *slog.Logger, o *Options) *Handler { o.AppendMetadata, nil, o.FeatureRegistry, + api_v1.OpenAPIOptions{ + ExternalURL: o.ExternalURL.String(), + Version: version, + }, ) if r := o.FeatureRegistry; r != nil { diff --git a/web/web_test.go b/web/web_test.go index ce682912a9..cbcf15ffdc 100644 --- a/web/web_test.go +++ b/web/web_test.go @@ -328,6 +328,7 @@ func TestDebugHandler(t *testing.T) { Host: "localhost.localdomain:9090", Scheme: "http", }, + Version: &PrometheusVersion{}, } handler := New(nil, opts) handler.SetReady(Ready) @@ -353,6 +354,7 @@ func TestHTTPMetrics(t *testing.T) { Host: "localhost.localdomain:9090", Scheme: "http", }, + Version: &PrometheusVersion{}, }) getReady := func() int { t.Helper()
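The /notifications/live path in the golden spec above describes a Server-Sent Events stream whose data field carries a JSON-encoded Notification with text, date, and active fields. The following is a minimal consumer sketch of that contract; the localhost address and the /api/v1 prefix are assumptions for illustration and are not asserted by this patch.

// sketch: read the live notifications SSE stream and decode each event.
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"strings"
	"time"
)

// notification mirrors the Notification schema in the golden spec.
type notification struct {
	Text   string    `json:"text"`
	Date   time.Time `json:"date"`
	Active bool      `json:"active"`
}

func main() {
	// Assumed address of a locally running server built with this patch.
	resp, err := http.Get("http://localhost:9090/api/v1/notifications/live")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		// Each SSE message carries the JSON-encoded notification in its data field.
		if !strings.HasPrefix(line, "data:") {
			continue
		}
		payload := strings.TrimSpace(strings.TrimPrefix(line, "data:"))
		var n notification
		if err := json.Unmarshal([]byte(payload), &n); err != nil {
			log.Printf("skipping malformed event: %v", err)
			continue
		}
		fmt.Printf("%s active=%t %s\n", n.Date.Format(time.RFC3339), n.Active, n.Text)
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}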