Mirror of https://github.com/prometheus/prometheus.git (synced 2026-02-03 20:39:32 -05:00)
Merge pull request #17644 from prometheus/binop-fill-modifier
Some checks are pending
buf.build / lint and publish (push) Waiting to run
CI / Go tests (push) Waiting to run
CI / More Go tests (push) Waiting to run
CI / Go tests with previous Go version (push) Waiting to run
CI / UI tests (push) Waiting to run
CI / Go tests on Windows (push) Waiting to run
CI / Mixins tests (push) Waiting to run
CI / Build Prometheus for common architectures (push) Waiting to run
CI / Build Prometheus for all architectures (push) Waiting to run
CI / Report status of build Prometheus for all architectures (push) Blocked by required conditions
CI / Check generated parser (push) Waiting to run
CI / golangci-lint (push) Waiting to run
CI / fuzzing (push) Waiting to run
CI / codeql (push) Waiting to run
CI / Publish main branch artifacts (push) Blocked by required conditions
CI / Publish release artefacts (push) Blocked by required conditions
CI / Publish UI on npm Registry (push) Blocked by required conditions
Scorecards supply-chain security / Scorecards analysis (push) Waiting to run
PromQL: Add `fill*()` binop modifiers to provide default values for missing series
This commit is contained in: commit 1d3d98ed16
31 changed files with 2242 additions and 746 deletions
@@ -281,6 +281,9 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
		case "promql-extended-range-selectors":
			parser.EnableExtendedRangeSelectors = true
			logger.Info("Experimental PromQL extended range selectors enabled.")
		case "promql-binop-fill-modifiers":
			parser.EnableBinopFillModifiers = true
			logger.Info("Experimental PromQL binary operator fill modifiers enabled.")
		case "":
			continue
		case "old-ui":

@@ -578,7 +581,7 @@ func main() {
	a.Flag("scrape.discovery-reload-interval", "Interval used by scrape manager to throttle target groups updates.").
		Hidden().Default("5s").SetValue(&cfg.scrape.DiscoveryReloadInterval)

	a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, extra-scrape-metrics, auto-gomaxprocs, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, old-ui, otlp-deltatocumulative, promql-duration-expr, use-uncached-io, promql-extended-range-selectors. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
	a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, extra-scrape-metrics, auto-gomaxprocs, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, old-ui, otlp-deltatocumulative, promql-duration-expr, use-uncached-io, promql-extended-range-selectors, promql-binop-fill-modifiers. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
		Default("").StringsVar(&cfg.featureList)

	a.Flag("agent", "Run Prometheus in 'Agent mode'.").BoolVar(&agentMode)

cmd/prometheus/testdata/features.json (vendored, 3 changes)

@@ -28,6 +28,9 @@
  "by": true,
  "delayed_name_removal": false,
  "duration_expr": false,
  "fill": false,
  "fill_left": false,
  "fill_right": false,
  "group_left": true,
  "group_right": true,
  "ignoring": true,

@@ -59,7 +59,7 @@ The Prometheus monitoring server
| <code class="text-nowrap">--query.timeout</code> | Maximum time a query may take before being aborted. Use with server mode only. | `2m` |
| <code class="text-nowrap">--query.max-concurrency</code> | Maximum number of queries executed concurrently. Use with server mode only. | `20` |
| <code class="text-nowrap">--query.max-samples</code> | Maximum number of samples a single query can load into memory. Note that queries will fail if they try to load more samples than this into memory, so this also limits the number of samples a query can return. Use with server mode only. | `50000000` |
| <code class="text-nowrap">--enable-feature</code> <code class="text-nowrap">...</code> | Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, extra-scrape-metrics, auto-gomaxprocs, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, old-ui, otlp-deltatocumulative, promql-duration-expr, use-uncached-io, promql-extended-range-selectors. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details. | |
| <code class="text-nowrap">--enable-feature</code> <code class="text-nowrap">...</code> | Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, extra-scrape-metrics, auto-gomaxprocs, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, old-ui, otlp-deltatocumulative, promql-duration-expr, use-uncached-io, promql-extended-range-selectors, promql-binop-fill-modifiers. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details. | |
| <code class="text-nowrap">--agent</code> | Run Prometheus in 'Agent mode'. | |
| <code class="text-nowrap">--log.level</code> | Only log messages with the given severity or above. One of: [debug, info, warn, error] | `info` |
| <code class="text-nowrap">--log.format</code> | Output format of log messages. One of: [logfmt, json] | `logfmt` |

@@ -67,12 +67,12 @@ Currently, Prometheus supports start timestamps on the

* `PrometheusProto`
* `OpenMetrics1.0.0`

From the above, Prometheus recommends `PrometheusProto`. This is because OpenMetrics 1.0 Start Timestamp information is shared as a `<metric>_created` metric and parsing those
are prone to errors and expensive (thus, adding an overhead). You also need to be careful to not pollute your Prometheus with extra `_created` metrics.

Therefore, when `created-timestamp-zero-ingestion` is enabled Prometheus changes the global `scrape_protocols` default configuration option to
`[ PrometheusProto, OpenMetricsText1.0.0, OpenMetricsText0.0.1, PrometheusText0.0.4 ]`, resulting in negotiating the Prometheus Protobuf protocol first (unless the `scrape_protocols` option is set to a different value explicitly).

Besides enabling this feature in Prometheus, start timestamps need to be exposed by the application being scraped.

@@ -288,8 +288,8 @@ when wrong types are used on wrong functions, automatic renames, delta types and

### Behavior with metadata records

When this feature is enabled and the metadata WAL records exists, in an unlikely situation when type or unit are different across those,
the Prometheus outputs intends to prefer the `__type__` and `__unit__` labels values. For example on Remote Write 2.0,
if the metadata record somehow (e.g. due to bug) says "counter", but `__type__="gauge"` the remote time series will be set to a gauge.

## Use Uncached IO

@@ -338,9 +338,25 @@ Example query:

> **Note for alerting and recording rules:**
> The `smoothed` modifier requires samples after the evaluation interval, so using it directly in alerting or recording rules will typically *under-estimate* the result, as future samples are not available at evaluation time.
> To use `smoothed` safely in rules, you **must** apply a `query_offset` to the rule group (see [documentation](https://prometheus.io/docs/prometheus/latest/configuration/recording_rules/#rule_group)) to ensure the calculation window is fully in the past and all needed samples are available.
> For critical alerting, set the offset to at least one scrape interval; for less critical or more resilient use cases, consider a larger offset (multiple scrape intervals) to tolerate missed scrapes.

For more details, see the [design doc](https://github.com/prometheus/proposals/blob/main/proposals/2025-04-04_extended-range-selectors-semantics.md).

**Note**: Extended Range Selectors are not supported for subqueries.

## Binary operator fill modifiers

`--enable-feature=promql-binop-fill-modifiers`

Enables experimental `fill()`, `fill_left()`, and `fill_right()` modifiers for PromQL binary operators. These modifiers allow filling in missing matches on either side of a binary operation with a provided default sample value.

Example query:

```
rate(successful_requests[5m])
+ fill(0)
rate(failed_requests[5m])
```

See [the fill modifiers documentation](querying/operators.md#filling-in-missing-matches) for more details and examples.

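For reference, a minimal Go sketch (illustrative, not code from this PR) of how the new flag and modifiers surface through the PromQL parser; the `EnableBinopFillModifiers` flag, the `BinaryExpr.VectorMatching` field, and its new `FillValues` come from this PR, the rest is assumed boilerplate:

```go
package main

import (
	"fmt"
	"log"

	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	// The new syntax is rejected unless this experimental flag is set.
	parser.EnableBinopFillModifiers = true

	expr, err := parser.ParseExpr(`rate(successful_requests[5m]) + fill(0) rate(failed_requests[5m])`)
	if err != nil {
		log.Fatal(err)
	}

	// fill(0) sets both fill values; fill_left()/fill_right() set only one,
	// leaving the other pointer nil ("no fill" for that side).
	vm := expr.(*parser.BinaryExpr).VectorMatching
	fmt.Println(*vm.FillValues.LHS, *vm.FillValues.RHS) // 0 0
}
```
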
@@ -47,9 +47,9 @@ special values like `NaN`, `+Inf`, and `-Inf`.
scalar that is the result of the operator applied to both scalar operands.

**Between an instant vector and a scalar**, the operator is applied to the
value of every data sample in the vector.

If the data sample is a float, the operation is performed between that float and the scalar.
For example, if an instant vector of float samples is multiplied by 2,
the result is another vector of float samples in which every sample value of
the original vector is multiplied by 2.

@@ -81,8 +81,9 @@ following:
**Between two instant vectors**, a binary arithmetic operator is applied to
each entry in the LHS vector and its [matching element](#vector-matching) in
the RHS vector. The result is propagated into the result vector with the
grouping labels becoming the output label set. Entries for which no matching
entry in the right-hand vector can be found are not part of the result.
grouping labels becoming the output label set. By default, series for which
no matching entry in the opposite vector can be found are not part of the
result. This behavior can be adjusted using [fill modifiers](#filling-in-missing-matches).

If two float samples are matched, the arithmetic operator is applied to the two
input values.

@@ -97,7 +98,7 @@ If two histogram samples are matched, only `+` and `-` are valid operations,
each adding or subtracting all matching bucket populations and the count and
the sum of observations. All other operations result in the removal of the
corresponding element from the output vector, flagged by an info-level
annotation. The `+` and -` operations should generally only be applied to gauge
annotation. The `+` and `-` operations should generally only be applied to gauge
histograms, but PromQL allows them for counter histograms, too, to cover
specific use cases, for which special attention is required to avoid problems
with unaligned counter resets. (Certain incompatibilities of counter resets can

@@ -106,7 +107,7 @@ two counter histograms results in a counter histogram. All other combination of
operands and all subtractions result in a gauge histogram.

**In any arithmetic binary operation involving vectors**, the metric name is
dropped. This occurs even if `__name__` is explicitly mentioned in `on`
(see https://github.com/prometheus/prometheus/issues/16631 for further discussion).

**For any arithmetic binary operation that may result in a negative

@@ -156,9 +157,9 @@ info-level annotation.
applied to matching entries. Vector elements for which the expression is not
true or which do not find a match on the other side of the expression get
dropped from the result, while the others are propagated into a result vector
with the grouping labels becoming the output label set.

Matches between two float samples work as usual.

Matches between a float sample and a histogram sample are invalid, and the
corresponding element is removed from the result vector, flagged by an info-level

@@ -171,8 +172,8 @@ comparison binary operations are again invalid.
modifier changes the behavior in the following ways:

* Vector elements which find a match on the other side of the expression but for
  which the expression is false instead have the value `0` and vector elements
  which the expression is false instead have the value `0`, and vector elements
  that do find a match and for which the expression is true have the value `1`.
  (Note that elements with no match or invalid operations involving histogram
  samples still return no result rather than the value `0`.)
* The metric name is dropped.

@@ -216,11 +217,10 @@ matching behavior: One-to-one and many-to-one/one-to-many.

### Vector matching keywords

These vector matching keywords allow for matching between series with different label sets
providing:
These vector matching keywords allow for matching between series with different label sets:

* `on`
* `ignoring`
* `on(<label list>)`: Only match on provided labels.
* `ignoring(<label list>)`: Ignore provided labels when matching.

Label lists provided to matching keywords will determine how vectors are combined. Examples
can be found in [One-to-one vector matches](#one-to-one-vector-matches) and in

@@ -230,8 +230,8 @@ can be found in [One-to-one vector matches](#one-to-one-vector-matches) and in

These group modifiers enable many-to-one/one-to-many vector matching:

* `group_left`
* `group_right`
* `group_left`: Allow many-to-one matching, where the left vector has higher cardinality.
* `group_right`: Allow one-to-many matching, where the right vector has higher cardinality.

Label lists can be provided to the group modifier which contain labels from the "one"-side to
be included in the result metrics.

@@ -239,11 +239,9 @@ be included in the result metrics.
_Many-to-one and one-to-many matching are advanced use cases that should be carefully considered.
Often a proper use of `ignoring(<labels>)` provides the desired outcome._

_Grouping modifiers can only be used for
[comparison](#comparison-binary-operators) and
[arithmetic](#arithmetic-binary-operators). Operations as `and`, `unless` and
`or` operations match with all possible entries in the right vector by
default._
_Grouping modifiers can only be used for [comparison](#comparison-binary-operators),
[arithmetic](#arithmetic-binary-operators), and [trigonometric](#trigonometric-binary-operators)
operators. Set operators match with all possible entries on either side by default._

### One-to-one vector matches

@@ -311,6 +309,58 @@ left:
    {method="post", code="500"} 0.05 // 6 / 120
    {method="post", code="404"} 0.175 // 21 / 120

### Filling in missing matches

Fill modifiers are **experimental** and must be enabled with `--enable-feature=promql-binop-fill-modifiers`.

By default, vector elements that do not find a match on the other side of a binary operation
are not included in the result vector. Fill modifiers allow overriding this behavior by filling
in missing series on either side of a binary operation with a provided default sample value:

* `fill(<value>)`: Fill in missing matches on either side with `value`.
* `fill_left(<value>)`: Fill in missing matches on the left side with `value`.
* `fill_right(<value>)`: Fill in missing matches on the right side with `value`.

`value` has to be a numeric literal representing a float sample. Histogram samples are not supported.

Note that these modifiers can only fill in series that are missing on one side of the operation.
If a series is missing on both sides, it cannot be created by these modifiers.

The fill modifiers can be used in the following combinations:

* `fill(<default>)`
* `fill_left(<default>)`
* `fill_right(<default>)`
* `fill_left(<default>) fill_right(<default>)`
* `fill_right(<default>) fill_left(<default>)`

If other binary operator modifiers like `bool`, `on`, `ignoring`, `group_left`, or `group_right`
are used, the fill modifiers must be provided last.

When using fill modifiers in combination with `group_left` or `group_right`, they behave as follows:

* If a fill modifier is used on the "many" side of a match, it will only fill in a single series
  for the "many" side of each match group, using the group's matching labels as the series identity.
* If a fill modifier is used on the "one" side of a match and the grouping modifier specifies
  label names to include from the "one" side (e.g. `left_vector * on(instance, job) group_left(info_label) fill_right(1) right_vector`), those labels will not be filled in for missing
  series, as there is no source for their values.

Fill modifiers are not supported for set operators (`and`, `or`, `unless`), as the purpose of those
operators is to filter series based on presence or absence in the other vector.

Example query, filling in missing series on either side with `0`:

    method_code:http_errors:rate5m{status="500"} / ignoring(code) fill(0) method:http_requests:rate5m

This returns a result vector containing the fraction of HTTP requests with status code
of 500 for each method, as measured over the last 5 minutes. The entries with methods `put` and `del`
are now included in the result with a filled-in default sample value of `0`, as they had no matching
series on the respective other side:

    {method="get"} 0.04 # 24 / 600
    {method="put"} +Inf # 3 / 0 (missing right side filled in)
    {method="del"} 0 # 0 / 34 (missing left side filled in)
    {method="post"} 0.05 # 6 / 120

## Aggregation operators

@@ -357,7 +407,7 @@ identical between all elements of the vector.

#### `sum`

`sum(v)` sums up sample values in `v` in the same way as the `+` binary operator does
between two values.

All sample values being aggregated into a single resulting vector element must either be
float samples or histogram samples. An aggregation of a mix of both is invalid,

@@ -393,7 +443,7 @@ vector, flagged by a warn-level annotation.

#### `min` and `max`

`min(v)` and `max(v)` return the minimum or maximum value, respectively, in `v`.

They only operate on float samples, following IEEE 754 floating
point arithmetic, which in particular implies that `NaN` is only ever

@@ -403,9 +453,9 @@ samples in the input vector are ignored, flagged by an info-level annotation.

#### `topk` and `bottomk`

`topk(k, v)` and `bottomk(k, v)` are different from other aggregators in that a subset of
`k` values from the input samples, including the original labels, are returned in the result vector.

`by` and `without` are only used to bucket the input vector.

Similar to `min` and `max`, they only operate on float samples, considering `NaN` values
to be farthest from the top or bottom, respectively. Histogram samples in the

@@ -415,7 +465,7 @@ If used in an instant query, `topk` and `bottomk` return series ordered by
value in descending or ascending order, respectively. If used with `by` or
`without`, then series within each bucket are sorted by value, and series in
the same bucket are returned consecutively, but there is no guarantee that
buckets of series will be returned in any particular order.

No sorting applies to range queries.

@@ -428,11 +478,11 @@ To get the 5 instances with the highest memory consumption across all instances

#### `limitk`

`limitk(k, v)` returns a subset of `k` input samples, including
the original labels in the result vector.

The subset is selected in a deterministic pseudo-random way.
This happens independent of the sample type.
Therefore, it works for both float samples and histogram samples.

##### Example

@@ -470,8 +520,8 @@ The value may be a float or histogram sample.

#### `count_values`

`count_values(l, v)` outputs one time series per unique sample value in `v`.
Each series has an additional label, given by `l`, and the label value is the
unique sample value. The value of each time series is the number of times that sample value was present.

`count_values` works with both float samples and histogram samples. For the

@@ -486,7 +536,7 @@ To count the number of binaries running each build version we could write:

#### `stddev`

`stddev(v)` returns the standard deviation of `v`.

`stddev` only works with float samples, following IEEE 754 floating
point arithmetic. Histogram samples in the input vector are ignored, flagged by

@@ -494,7 +544,7 @@ an info-level annotation.

#### `stdvar`

`stdvar(v)` returns the standard variance of `v`.

`stdvar` only works with float samples, following IEEE 754 floating
point arithmetic. Histogram samples in the input vector are ignored, flagged by

@@ -510,12 +560,12 @@ are ignored, flagged by an info-level annotation.

`NaN` is considered the smallest possible value.

For example, `quantile(0.5, ...)` calculates the median, `quantile(0.95, ...)` the 95th percentile.

Special cases:

* For φ = `NaN`, `NaN` is returned.
* For φ < 0, `-Inf` is returned.
* For φ > 1, `+Inf` is returned.

## Binary operator precedence
|
||||
|
|
|
|||
|
|
@ -2862,7 +2862,8 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
|
|||
if matching.Card == parser.CardManyToMany {
|
||||
panic("many-to-many only allowed for set operators")
|
||||
}
|
||||
if len(lhs) == 0 || len(rhs) == 0 {
|
||||
if (len(lhs) == 0 && len(rhs) == 0) ||
|
||||
((len(lhs) == 0 || len(rhs) == 0) && matching.FillValues.RHS == nil && matching.FillValues.LHS == nil) {
|
||||
return nil, nil // Short-circuit: nothing is going to match.
|
||||
}
|
||||
|
||||
|
|
@ -2910,17 +2911,9 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
|
|||
}
|
||||
matchedSigs := enh.matchedSigs
|
||||
|
||||
// For all lhs samples find a respective rhs sample and perform
|
||||
// the binary operation.
|
||||
var lastErr error
|
||||
for i, ls := range lhs {
|
||||
sigOrd := lhsh[i].sigOrdinal
|
||||
|
||||
rs, found := rightSigs[sigOrd] // Look for a match in the rhs Vector.
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
|
||||
doBinOp := func(ls, rs Sample, sigOrd int) {
|
||||
// Account for potentially swapped sidedness.
|
||||
fl, fr := ls.F, rs.F
|
||||
hl, hr := ls.H, rs.H
|
||||
|
|
@ -2931,7 +2924,7 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
|
|||
floatValue, histogramValue, keep, info, err := vectorElemBinop(op, fl, fr, hl, hr, pos)
|
||||
if err != nil {
|
||||
lastErr = err
|
||||
continue
|
||||
return
|
||||
}
|
||||
if info != nil {
|
||||
lastErr = info
|
||||
|
|
@ -2971,7 +2964,7 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
|
|||
}
|
||||
|
||||
if !keep && !returnBool {
|
||||
continue
|
||||
return
|
||||
}
|
||||
|
||||
enh.Out = append(enh.Out, Sample{
|
||||
|
|
@ -2981,6 +2974,43 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
|
|||
DropName: returnBool,
|
||||
})
|
||||
}
|
||||
|
||||
// For all lhs samples, find a respective rhs sample and perform
|
||||
// the binary operation.
|
||||
for i, ls := range lhs {
|
||||
sigOrd := lhsh[i].sigOrdinal
|
||||
|
||||
rs, found := rightSigs[sigOrd] // Look for a match in the rhs Vector.
|
||||
if !found {
|
||||
fill := matching.FillValues.RHS
|
||||
if fill == nil {
|
||||
continue
|
||||
}
|
||||
rs = Sample{
|
||||
Metric: ls.Metric.MatchLabels(matching.On, matching.MatchingLabels...),
|
||||
F: *fill,
|
||||
}
|
||||
}
|
||||
|
||||
doBinOp(ls, rs, sigOrd)
|
||||
}
|
||||
|
||||
// For any rhs samples which have not been matched, check if we need to
|
||||
// perform the operation with a fill value from the lhs.
|
||||
if fill := matching.FillValues.LHS; fill != nil {
|
||||
for sigOrd, rs := range rightSigs {
|
||||
if _, matched := matchedSigs[sigOrd]; matched {
|
||||
continue // Already matched.
|
||||
}
|
||||
ls := Sample{
|
||||
Metric: rs.Metric.MatchLabels(matching.On, matching.MatchingLabels...),
|
||||
F: *fill,
|
||||
}
|
||||
|
||||
doBinOp(ls, rs, sigOrd)
|
||||
}
|
||||
}
|
||||
|
||||
return enh.Out, lastErr
|
||||
}
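To summarize the semantics the evaluator changes above implement, here is a deliberately simplified, self-contained Go sketch (not the engine code): samples are keyed by their matching signature, and a missing side is substituted with the configured default only when a fill value is set for that side. Only float addition is shown; the real evaluator also handles histograms, cardinality, and annotations.

```go
package main

import "fmt"

type fillValues struct {
	lhs, rhs *float64 // nil means "do not fill this side"
}

// addWithFill adds matching series from lhs and rhs, keyed by matching signature.
func addWithFill(lhs, rhs map[string]float64, fv fillValues) map[string]float64 {
	out := map[string]float64{}
	for sig, l := range lhs {
		r, ok := rhs[sig]
		if !ok {
			if fv.rhs == nil {
				continue // no match and no fill: drop the series, as before this change
			}
			r = *fv.rhs // fill in the missing right-hand sample
		}
		out[sig] = l + r
	}
	// Unmatched right-hand series only produce output if a left-hand fill is set.
	if fv.lhs != nil {
		for sig, r := range rhs {
			if _, matched := lhs[sig]; !matched {
				out[sig] = *fv.lhs + r
			}
		}
	}
	return out
}

func main() {
	zero := 0.0
	lhs := map[string]float64{`{label="a"}`: 10, `{label="b"}`: 20, `{label="c"}`: 30}
	rhs := map[string]float64{`{label="a"}`: 100, `{label="b"}`: 200, `{label="d"}`: 400}
	fmt.Println(addWithFill(lhs, rhs, fillValues{lhs: &zero, rhs: &zero}))
	// Contains: a:110 b:220 c:30 d:400
}
```

With both fill values set to zero this reproduces the `left_vector + fill(0) right_vector` case from the new test file added below.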
|
||||
|
||||
|
|
|
|||
|
|
@ -318,6 +318,19 @@ type VectorMatching struct {
|
|||
// Include contains additional labels that should be included in
|
||||
// the result from the side with the lower cardinality.
|
||||
Include []string
|
||||
// Fill-in values to use when a series from one side does not find a match on the other side.
|
||||
FillValues VectorMatchFillValues
|
||||
}
|
||||
|
||||
// VectorMatchFillValues contains the fill values to use for Vector matching
|
||||
// when one side does not find a match on the other side.
|
||||
// When a fill value is nil, no fill is applied for that side, and there
|
||||
// is no output for the match group if there is no match.
|
||||
type VectorMatchFillValues struct {
|
||||
// RHS is the fill value to use for the right-hand side.
|
||||
RHS *float64
|
||||
// LHS is the fill value to use for the left-hand side.
|
||||
LHS *float64
|
||||
}
|
||||
|
||||
// Visitor allows visiting a Node and its child nodes. The Visit method is
|
||||
|
|
|
|||
|
|
@ -26,6 +26,8 @@ func RegisterFeatures(r features.Collector) {
|
|||
switch keyword {
|
||||
case "anchored", "smoothed":
|
||||
r.Set(features.PromQL, keyword, EnableExtendedRangeSelectors)
|
||||
case "fill", "fill_left", "fill_right":
|
||||
r.Set(features.PromQL, keyword, EnableBinopFillModifiers)
|
||||
default:
|
||||
r.Enable(features.PromQL, keyword)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -139,6 +139,9 @@ BOOL
|
|||
BY
|
||||
GROUP_LEFT
|
||||
GROUP_RIGHT
|
||||
FILL
|
||||
FILL_LEFT
|
||||
FILL_RIGHT
|
||||
IGNORING
|
||||
OFFSET
|
||||
SMOOTHED
|
||||
|
|
@ -190,7 +193,7 @@ START_METRIC_SELECTOR
|
|||
%type <int> int
|
||||
%type <uint> uint
|
||||
%type <float> number series_value signed_number signed_or_unsigned_number
|
||||
%type <node> step_invariant_expr aggregate_expr aggregate_modifier bin_modifier binary_expr bool_modifier expr function_call function_call_args function_call_body group_modifiers label_matchers matrix_selector number_duration_literal offset_expr anchored_expr smoothed_expr on_or_ignoring paren_expr string_literal subquery_expr unary_expr vector_selector duration_expr paren_duration_expr positive_duration_expr offset_duration_expr
|
||||
%type <node> step_invariant_expr aggregate_expr aggregate_modifier bin_modifier fill_modifiers binary_expr bool_modifier expr function_call function_call_args function_call_body group_modifiers fill_value label_matchers matrix_selector number_duration_literal offset_expr anchored_expr smoothed_expr on_or_ignoring paren_expr string_literal subquery_expr unary_expr vector_selector duration_expr paren_duration_expr positive_duration_expr offset_duration_expr
|
||||
|
||||
%start start
|
||||
|
||||
|
|
@ -302,7 +305,7 @@ binary_expr : expr ADD bin_modifier expr { $$ = yylex.(*parser).newBinar
|
|||
|
||||
// Using left recursion for the modifier rules, helps to keep the parser stack small and
|
||||
// reduces allocations.
|
||||
bin_modifier : group_modifiers;
|
||||
bin_modifier : fill_modifiers;
|
||||
|
||||
bool_modifier : /* empty */
|
||||
{ $$ = &BinaryExpr{
|
||||
|
|
@ -346,6 +349,47 @@ group_modifiers: bool_modifier /* empty */
|
|||
}
|
||||
;
|
||||
|
||||
fill_modifiers: group_modifiers /* empty */
|
||||
/* Only fill() */
|
||||
| group_modifiers FILL fill_value
|
||||
{
|
||||
$$ = $1
|
||||
fill := $3.(*NumberLiteral).Val
|
||||
$$.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill
|
||||
$$.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill
|
||||
}
|
||||
/* Only fill_left() */
|
||||
| group_modifiers FILL_LEFT fill_value
|
||||
{
|
||||
$$ = $1
|
||||
fill := $3.(*NumberLiteral).Val
|
||||
$$.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill
|
||||
}
|
||||
/* Only fill_right() */
|
||||
| group_modifiers FILL_RIGHT fill_value
|
||||
{
|
||||
$$ = $1
|
||||
fill := $3.(*NumberLiteral).Val
|
||||
$$.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill
|
||||
}
|
||||
/* fill_left() fill_right() */
|
||||
| group_modifiers FILL_LEFT fill_value FILL_RIGHT fill_value
|
||||
{
|
||||
$$ = $1
|
||||
fill_left := $3.(*NumberLiteral).Val
|
||||
fill_right := $5.(*NumberLiteral).Val
|
||||
$$.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill_left
|
||||
$$.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill_right
|
||||
}
|
||||
/* fill_right() fill_left() */
|
||||
| group_modifiers FILL_RIGHT fill_value FILL_LEFT fill_value
|
||||
{
|
||||
fill_right := $3.(*NumberLiteral).Val
|
||||
fill_left := $5.(*NumberLiteral).Val
|
||||
$$.(*BinaryExpr).VectorMatching.FillValues.LHS = &fill_left
|
||||
$$.(*BinaryExpr).VectorMatching.FillValues.RHS = &fill_right
|
||||
}
|
||||
;
|
||||
|
||||
grouping_labels : LEFT_PAREN grouping_label_list RIGHT_PAREN
|
||||
{ $$ = $2 }
|
||||
|
|
@ -387,6 +431,21 @@ grouping_label : maybe_label
|
|||
{ yylex.(*parser).unexpected("grouping opts", "label"); $$ = Item{} }
|
||||
;
|
||||
|
||||
fill_value : LEFT_PAREN number_duration_literal RIGHT_PAREN
|
||||
{
|
||||
$$ = $2.(*NumberLiteral)
|
||||
}
|
||||
| LEFT_PAREN unary_op number_duration_literal RIGHT_PAREN
|
||||
{
|
||||
nl := $3.(*NumberLiteral)
|
||||
if $2.Typ == SUB {
|
||||
nl.Val *= -1
|
||||
}
|
||||
nl.PosRange.Start = $2.Pos
|
||||
$$ = nl
|
||||
}
|
||||
;
|
||||
|
||||
/*
|
||||
* Function calls.
|
||||
*/
|
||||
|
|
@ -697,7 +756,7 @@ metric : metric_identifier label_set
|
|||
;
|
||||
|
||||
|
||||
metric_identifier: AVG | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | IDENTIFIER | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | QUANTILE | STDDEV | STDVAR | SUM | TOPK | WITHOUT | START | END | LIMITK | LIMIT_RATIO | STEP | RANGE | ANCHORED | SMOOTHED;
|
||||
metric_identifier: AVG | BOTTOMK | BY | COUNT | COUNT_VALUES | FILL | FILL_LEFT | FILL_RIGHT | GROUP | IDENTIFIER | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | QUANTILE | STDDEV | STDVAR | SUM | TOPK | WITHOUT | START | END | LIMITK | LIMIT_RATIO | STEP | RANGE | ANCHORED | SMOOTHED;
|
||||
|
||||
label_set : LEFT_BRACE label_set_list RIGHT_BRACE
|
||||
{ $$ = labels.New($2...) }
|
||||
|
|
@ -954,7 +1013,7 @@ counter_reset_hint : UNKNOWN_COUNTER_RESET | COUNTER_RESET | NOT_COUNTER_RESET |
|
|||
aggregate_op : AVG | BOTTOMK | COUNT | COUNT_VALUES | GROUP | MAX | MIN | QUANTILE | STDDEV | STDVAR | SUM | TOPK | LIMITK | LIMIT_RATIO;
|
||||
|
||||
// Inside of grouping options label names can be recognized as keywords by the lexer. This is a list of keywords that could also be a label name.
|
||||
maybe_label : AVG | BOOL | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | GROUP_LEFT | GROUP_RIGHT | IDENTIFIER | IGNORING | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | ON | QUANTILE | STDDEV | STDVAR | SUM | TOPK | START | END | ATAN2 | LIMITK | LIMIT_RATIO | STEP | RANGE | ANCHORED | SMOOTHED;
|
||||
maybe_label : AVG | BOOL | BOTTOMK | BY | COUNT | COUNT_VALUES | GROUP | GROUP_LEFT | GROUP_RIGHT | FILL | FILL_LEFT | FILL_RIGHT | IDENTIFIER | IGNORING | LAND | LOR | LUNLESS | MAX | METRIC_IDENTIFIER | MIN | OFFSET | ON | QUANTILE | STDDEV | STDVAR | SUM | TOPK | START | END | ATAN2 | LIMITK | LIMIT_RATIO | STEP | RANGE | ANCHORED | SMOOTHED;
|
||||
|
||||
unary_op : ADD | SUB;
|
||||
|
||||
|
|
@ -1162,7 +1221,7 @@ offset_duration_expr : number_duration_literal
|
|||
}
|
||||
| duration_expr
|
||||
;
|
||||
|
||||
|
||||
min_max: MIN | MAX ;
|
||||
|
||||
duration_expr : number_duration_literal
|
||||
|
|
@ -1277,14 +1336,14 @@ duration_expr : number_duration_literal
|
|||
;
|
||||
|
||||
paren_duration_expr : LEFT_PAREN duration_expr RIGHT_PAREN
|
||||
{
|
||||
{
|
||||
yylex.(*parser).experimentalDurationExpr($2.(Expr))
|
||||
if durationExpr, ok := $2.(*DurationExpr); ok {
|
||||
durationExpr.Wrapped = true
|
||||
$$ = durationExpr
|
||||
break
|
||||
}
|
||||
$$ = $2
|
||||
$$ = $2
|
||||
}
|
||||
;
|
||||
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -137,6 +137,9 @@ var key = map[string]ItemType{
|
|||
"ignoring": IGNORING,
|
||||
"group_left": GROUP_LEFT,
|
||||
"group_right": GROUP_RIGHT,
|
||||
"fill": FILL,
|
||||
"fill_left": FILL_LEFT,
|
||||
"fill_right": FILL_RIGHT,
|
||||
"bool": BOOL,
|
||||
|
||||
// Preprocessors.
|
||||
|
|
@ -1083,6 +1086,17 @@ Loop:
|
|||
word := l.input[l.start:l.pos]
|
||||
switch kw, ok := key[strings.ToLower(word)]; {
|
||||
case ok:
|
||||
// For fill/fill_left/fill_right, only treat as keyword if followed by '('
|
||||
// This allows using these as metric names (e.g., "fill + fill").
|
||||
// This could be done for other keywords as well, but for the new fill
|
||||
// modifiers this is especially important so we don't break any existing
|
||||
// queries.
|
||||
if kw == FILL || kw == FILL_LEFT || kw == FILL_RIGHT {
|
||||
if !l.peekFollowedByLeftParen() {
|
||||
l.emit(IDENTIFIER)
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
l.emit(kw)
|
||||
case !strings.Contains(word, ":"):
|
||||
l.emit(IDENTIFIER)
|
||||
|
|
@ -1098,6 +1112,23 @@ Loop:
|
|||
return lexStatements
|
||||
}
|
||||
|
||||
// peekFollowedByLeftParen checks if the next non-whitespace character is '('.
|
||||
// This is used for context-sensitive keywords like fill/fill_left/fill_right
|
||||
// that should only be treated as keywords when followed by '('.
|
||||
func (l *Lexer) peekFollowedByLeftParen() bool {
|
||||
pos := l.pos
|
||||
for {
|
||||
if int(pos) >= len(l.input) {
|
||||
return false
|
||||
}
|
||||
r, w := utf8.DecodeRuneInString(l.input[pos:])
|
||||
if !isSpace(r) {
|
||||
return r == '('
|
||||
}
|
||||
pos += posrange.Pos(w)
|
||||
}
|
||||
}
|
||||
|
||||
func isSpace(r rune) bool {
|
||||
return r == ' ' || r == '\t' || r == '\n' || r == '\r'
|
||||
}
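The lexer change above makes `fill`, `fill_left`, and `fill_right` context-sensitive: they are keywords only when followed by `(`, so existing metrics with those names keep parsing as selectors. A small illustrative sketch (assuming the parser package as modified in this PR):

```go
package main

import (
	"fmt"
	"log"

	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	parser.EnableBinopFillModifiers = true

	// Not followed by '(': "fill" stays an ordinary metric name.
	expr, err := parser.ParseExpr(`other + fill`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(expr.(*parser.BinaryExpr).RHS.(*parser.VectorSelector).Name) // fill

	// Followed by '(': "fill" is the modifier, and the trailing "fill" is a selector again.
	expr, err = parser.ParseExpr(`other + fill(0) fill`)
	if err != nil {
		log.Fatal(err)
	}
	b := expr.(*parser.BinaryExpr)
	fmt.Println(b.RHS.(*parser.VectorSelector).Name, *b.VectorMatching.FillValues.RHS) // fill 0
}
```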
|
||||
|
|
|
|||
|
|
@ -45,6 +45,9 @@ var ExperimentalDurationExpr bool
|
|||
// EnableExtendedRangeSelectors is a flag to enable experimental extended range selectors.
|
||||
var EnableExtendedRangeSelectors bool
|
||||
|
||||
// EnableBinopFillModifiers is a flag to enable experimental fill modifiers for binary operators.
|
||||
var EnableBinopFillModifiers bool
|
||||
|
||||
type Parser interface {
|
||||
ParseExpr() (Expr, error)
|
||||
Close()
|
||||
|
|
@ -413,13 +416,18 @@ func (p *parser) InjectItem(typ ItemType) {
|
|||
p.injecting = true
|
||||
}
|
||||
|
||||
func (*parser) newBinaryExpression(lhs Node, op Item, modifiers, rhs Node) *BinaryExpr {
|
||||
func (p *parser) newBinaryExpression(lhs Node, op Item, modifiers, rhs Node) *BinaryExpr {
|
||||
ret := modifiers.(*BinaryExpr)
|
||||
|
||||
ret.LHS = lhs.(Expr)
|
||||
ret.RHS = rhs.(Expr)
|
||||
ret.Op = op.Typ
|
||||
|
||||
if !EnableBinopFillModifiers && (ret.VectorMatching.FillValues.LHS != nil || ret.VectorMatching.FillValues.RHS != nil) {
|
||||
p.addParseErrf(ret.PositionRange(), "binop fill modifiers are experimental and not enabled")
|
||||
return ret
|
||||
}
|
||||
|
||||
return ret
|
||||
}
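A short illustrative sketch of the gate added in `newBinaryExpression` (assuming the parser package as modified in this PR): with the feature flag off, the new syntax fails at parse time instead of being silently ignored.

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	parser.EnableBinopFillModifiers = false

	_, err := parser.ParseExpr(`a + fill(0) b`)
	// Expected to report something like:
	// "binop fill modifiers are experimental and not enabled"
	fmt.Println(err)
}
```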
|
||||
|
||||
|
|
@ -768,6 +776,9 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
|
|||
if len(n.VectorMatching.MatchingLabels) > 0 {
|
||||
p.addParseErrf(n.PositionRange(), "vector matching only allowed between instant vectors")
|
||||
}
|
||||
if n.VectorMatching.FillValues.LHS != nil || n.VectorMatching.FillValues.RHS != nil {
|
||||
p.addParseErrf(n.PositionRange(), "filling in missing series only allowed between instant vectors")
|
||||
}
|
||||
n.VectorMatching = nil
|
||||
case n.Op.IsSetOperator(): // Both operands are Vectors.
|
||||
if n.VectorMatching.Card == CardOneToMany || n.VectorMatching.Card == CardManyToOne {
|
||||
|
|
@ -776,6 +787,9 @@ func (p *parser) checkAST(node Node) (typ ValueType) {
|
|||
if n.VectorMatching.Card != CardManyToMany {
|
||||
p.addParseErrf(n.PositionRange(), "set operations must always be many-to-many")
|
||||
}
|
||||
if n.VectorMatching.FillValues.LHS != nil || n.VectorMatching.FillValues.RHS != nil {
|
||||
p.addParseErrf(n.PositionRange(), "filling in missing series not allowed for set operators")
|
||||
}
|
||||
}
|
||||
|
||||
if (lt == ValueTypeScalar || rt == ValueTypeScalar) && n.Op.IsSetOperator() {
|
||||
|
|
|
|||
|
|
@ -172,6 +172,19 @@ func (node *BinaryExpr) getMatchingStr() string {
|
|||
b.WriteString(")")
|
||||
matching += b.String()
|
||||
}
|
||||
|
||||
if vm.FillValues.LHS != nil || vm.FillValues.RHS != nil {
|
||||
if vm.FillValues.LHS == vm.FillValues.RHS {
|
||||
matching += fmt.Sprintf(" fill (%v)", *vm.FillValues.LHS)
|
||||
} else {
|
||||
if vm.FillValues.LHS != nil {
|
||||
matching += fmt.Sprintf(" fill_left (%v)", *vm.FillValues.LHS)
|
||||
}
|
||||
if vm.FillValues.RHS != nil {
|
||||
matching += fmt.Sprintf(" fill_right (%v)", *vm.FillValues.RHS)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return matching
|
||||
}
|
||||
|
|
|
|||
|
|
@ -23,8 +23,10 @@ import (
|
|||
|
||||
func TestExprString(t *testing.T) {
|
||||
ExperimentalDurationExpr = true
|
||||
EnableBinopFillModifiers = true
|
||||
t.Cleanup(func() {
|
||||
ExperimentalDurationExpr = false
|
||||
EnableBinopFillModifiers = false
|
||||
})
|
||||
// A list of valid expressions that are expected to be
|
||||
// returned as out when calling String(). If out is empty the output
|
||||
|
|
@ -113,6 +115,26 @@ func TestExprString(t *testing.T) {
|
|||
in: `a - ignoring() group_left c`,
|
||||
out: `a - ignoring () group_left () c`,
|
||||
},
|
||||
{
|
||||
in: `a + fill(-23) b`,
|
||||
out: `a + fill (-23) b`,
|
||||
},
|
||||
{
|
||||
in: `a + fill_left(-23) b`,
|
||||
out: `a + fill_left (-23) b`,
|
||||
},
|
||||
{
|
||||
in: `a + fill_right(42) b`,
|
||||
out: `a + fill_right (42) b`,
|
||||
},
|
||||
{
|
||||
in: `a + fill_left(-23) fill_right(42) b`,
|
||||
out: `a + fill_left (-23) fill_right (42) b`,
|
||||
},
|
||||
{
|
||||
in: `a + on(b) group_left fill(-23) c`,
|
||||
out: `a + on (b) group_left () fill (-23) c`,
|
||||
},
|
||||
{
|
||||
in: `up > bool 0`,
|
||||
},
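These cases also pin down the canonical formatting; an illustrative round-trip sketch (assuming the parser package as modified in this PR):

```go
package main

import (
	"fmt"
	"log"

	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	parser.EnableBinopFillModifiers = true

	expr, err := parser.ParseExpr(`a + fill_left(-23) fill_right(42) b`)
	if err != nil {
		log.Fatal(err)
	}
	// The printer emits a space before the parenthesis, mirroring group_left/group_right.
	fmt.Println(expr.String()) // a + fill_left (-23) fill_right (42) b
}
```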
|
||||
|
|
|
|||
|
|
@ -157,10 +157,12 @@ func RunBuiltinTestsWithStorage(t TBRun, engine promql.QueryEngine, newStorage f
|
|||
parser.EnableExperimentalFunctions = false
|
||||
parser.ExperimentalDurationExpr = false
|
||||
parser.EnableExtendedRangeSelectors = false
|
||||
parser.EnableBinopFillModifiers = false
|
||||
})
|
||||
parser.EnableExperimentalFunctions = true
|
||||
parser.ExperimentalDurationExpr = true
|
||||
parser.EnableExtendedRangeSelectors = true
|
||||
parser.EnableBinopFillModifiers = true
|
||||
|
||||
files, err := fs.Glob(testsFs, "*/*.test")
|
||||
require.NoError(t, err)
|
||||
|
|
|
|||
383
promql/promqltest/testdata/fill-modifier.test
vendored
Normal file
|
|
@ -0,0 +1,383 @@
|
|||
# ==================== fill / fill_left / fill_right modifier tests ====================
|
||||
|
||||
# Test data for fill modifier tests: vectors with partial overlap.
|
||||
load 5m
|
||||
left_vector{label="a"} 10
|
||||
left_vector{label="b"} 20
|
||||
left_vector{label="c"} 30
|
||||
right_vector{label="a"} 100
|
||||
right_vector{label="b"} 200
|
||||
right_vector{label="d"} 400
|
||||
|
||||
# ---------- Arithmetic operators with fill modifiers ----------
|
||||
|
||||
# fill(0): Fill both sides with 0 for addition.
|
||||
eval instant at 0m left_vector + fill(0) right_vector
|
||||
{label="a"} 110
|
||||
{label="b"} 220
|
||||
{label="c"} 30
|
||||
{label="d"} 400
|
||||
|
||||
# fill_left(0): Only fill left side with 0.
|
||||
eval instant at 0m left_vector + fill_left(0) right_vector
|
||||
{label="a"} 110
|
||||
{label="b"} 220
|
||||
{label="d"} 400
|
||||
|
||||
# fill_right(0): Only fill right side with 0.
|
||||
eval instant at 0m left_vector + fill_right(0) right_vector
|
||||
{label="a"} 110
|
||||
{label="b"} 220
|
||||
{label="c"} 30
|
||||
|
||||
# fill_left and fill_right with different values.
|
||||
eval instant at 0m left_vector + fill_left(5) fill_right(7) right_vector
|
||||
{label="a"} 110
|
||||
{label="b"} 220
|
||||
{label="c"} 37
|
||||
{label="d"} 405
|
||||
|
||||
# fill with NaN.
|
||||
eval instant at 0m left_vector + fill(NaN) right_vector
|
||||
{label="a"} 110
|
||||
{label="b"} 220
|
||||
{label="c"} NaN
|
||||
{label="d"} NaN
|
||||
|
||||
# fill with Inf.
|
||||
eval instant at 0m left_vector + fill(Inf) right_vector
|
||||
{label="a"} 110
|
||||
{label="b"} 220
|
||||
{label="c"} +Inf
|
||||
{label="d"} +Inf
|
||||
|
||||
# fill with -Inf.
|
||||
eval instant at 0m left_vector + fill(-Inf) right_vector
|
||||
{label="a"} 110
|
||||
{label="b"} 220
|
||||
{label="c"} -Inf
|
||||
{label="d"} -Inf
|
||||
|
||||
# ---------- Comparison operators with fill modifiers ----------
|
||||
|
||||
# fill with equality comparison.
|
||||
eval instant at 0m left_vector == fill(30) right_vector
|
||||
left_vector{label="c"} 30
|
||||
|
||||
# fill with inequality comparison.
|
||||
eval instant at 0m left_vector != fill(30) right_vector
|
||||
left_vector{label="a"} 10
|
||||
left_vector{label="b"} 20
|
||||
{label="d"} 30
|
||||
|
||||
# fill with greater than.
|
||||
eval instant at 0m left_vector > fill(25) right_vector
|
||||
left_vector{label="c"} 30
|
||||
|
||||
# ---------- Comparison operators with bool modifier and fill ----------
|
||||
|
||||
# fill with equality comparison and bool.
|
||||
eval instant at 0m left_vector == bool fill(30) right_vector
|
||||
{label="a"} 0
|
||||
{label="b"} 0
|
||||
{label="c"} 1
|
||||
{label="d"} 0
|
||||
|
||||
# fill with inequality comparison and bool.
|
||||
eval instant at 0m left_vector != bool fill(30) right_vector
|
||||
{label="a"} 1
|
||||
{label="b"} 1
|
||||
{label="c"} 0
|
||||
{label="d"} 1
|
||||
|
||||
# fill with greater than and bool.
|
||||
eval instant at 0m left_vector > bool fill(25) right_vector
|
||||
{label="a"} 0
|
||||
{label="b"} 0
|
||||
{label="c"} 1
|
||||
{label="d"} 0
|
||||
|
||||
# ---------- fill with on() and ignoring() modifiers ----------
|
||||
|
||||
clear
|
||||
|
||||
load 5m
|
||||
left_vector{job="foo", instance="a"} 10
|
||||
left_vector{job="foo", instance="b"} 20
|
||||
left_vector{job="bar", instance="a"} 30
|
||||
right_vector{job="foo", instance="a"} 100
|
||||
right_vector{job="foo", instance="c"} 300
|
||||
|
||||
# fill with on().
|
||||
eval instant at 0m left_vector + on(job, instance) fill(0) right_vector
|
||||
{job="foo", instance="a"} 110
|
||||
{job="foo", instance="b"} 20
|
||||
{job="bar", instance="a"} 30
|
||||
{job="foo", instance="c"} 300
|
||||
|
||||
# fill_right with on().
|
||||
eval instant at 0m left_vector + on(job, instance) fill_right(0) right_vector
|
||||
{job="foo", instance="a"} 110
|
||||
{job="foo", instance="b"} 20
|
||||
{job="bar", instance="a"} 30
|
||||
|
||||
# fill_left with on().
|
||||
eval instant at 0m left_vector + on(job, instance) fill_left(0) right_vector
|
||||
{job="foo", instance="a"} 110
|
||||
{job="foo", instance="c"} 300
|
||||
|
||||
# fill with ignoring() - requires group_left since ignoring(job) creates many-to-one matching
|
||||
# when two left_vector series have same instance but different jobs.
|
||||
eval instant at 0m left_vector + ignoring(job) group_left fill(0) right_vector
|
||||
{instance="a", job="foo"} 110
|
||||
{instance="a", job="bar"} 130
|
||||
{instance="b", job="foo"} 20
|
||||
{instance="c"} 300
|
||||
|
||||
# ---------- fill with group_left / group_right (many-to-one / one-to-many) ----------
|
||||
|
||||
clear
|
||||
|
||||
load 5m
|
||||
requests{method="GET", status="200"} 100
|
||||
requests{method="POST", status="200"} 200
|
||||
requests{method="GET", status="500"} 10
|
||||
requests{method="POST", status="500"} 20
|
||||
limits{status="200"} 1000
|
||||
limits{status="404"} 500
|
||||
limits{status="500"} 50
|
||||
|
||||
# group_left with fill_right: fill missing "one" side series.
|
||||
eval instant at 0m requests / on(status) group_left fill_right(1) limits
|
||||
{method="GET", status="200"} 0.1
|
||||
{method="POST", status="200"} 0.2
|
||||
{method="GET", status="500"} 0.2
|
||||
{method="POST", status="500"} 0.4
|
||||
|
||||
# group_left with fill_left: fill missing "many" side series.
|
||||
# For status="404", there's no matching requests, so a single series with the match group's labels is filled
|
||||
eval instant at 0m requests + on(status) group_left fill_left(0) limits
|
||||
{method="GET", status="200"} 1100
|
||||
{method="POST", status="200"} 1200
|
||||
{method="GET", status="500"} 60
|
||||
{method="POST", status="500"} 70
|
||||
{status="404"} 500
|
||||
|
||||
# group_left with fill on both sides.
|
||||
eval instant at 0m requests + on(status) group_left fill(0) limits
|
||||
{method="GET", status="200"} 1100
|
||||
{method="POST", status="200"} 1200
|
||||
{method="GET", status="500"} 60
|
||||
{method="POST", status="500"} 70
|
||||
{status="404"} 500
|
||||
|
||||
# group_right with fill_left: fill missing "one" side series.
|
||||
clear
|
||||
|
||||
load 5m
|
||||
cpu_info{instance="a", cpu="0"} 1
|
||||
cpu_info{instance="a", cpu="1"} 1
|
||||
cpu_info{instance="b", cpu="0"} 1
|
||||
node_meta{instance="a"} 100
|
||||
node_meta{instance="c"} 300
|
||||
|
||||
# fill_left fills the "one" side (node_meta) when missing for a "many" side series.
|
||||
eval instant at 0m node_meta * on(instance) group_right fill_left(1) cpu_info
|
||||
{instance="a", cpu="0"} 100
|
||||
{instance="a", cpu="1"} 100
|
||||
{instance="c"} 300
|
||||
|
||||
# group_right with fill_right: fill missing "many" side series.
|
||||
eval instant at 0m node_meta * on(instance) group_right fill_right(0) cpu_info
|
||||
{instance="a", cpu="0"} 100
|
||||
{instance="a", cpu="1"} 100
|
||||
{instance="b", cpu="0"} 0
|
||||
|
||||
# group_right with fill on both sides.
|
||||
eval instant at 0m node_meta * on(instance) group_right fill(1) cpu_info
|
||||
{instance="a", cpu="0"} 100
|
||||
{instance="a", cpu="1"} 100
|
||||
{instance="b", cpu="0"} 1
|
||||
{instance="c"} 300
|
||||
|
||||
# ---------- fill with group_left/group_right and extra labels ----------
|
||||
|
||||
clear
|
||||
|
||||
load 5m
|
||||
requests{method="GET", status="200"} 100
|
||||
requests{method="POST", status="200"} 200
|
||||
limits{status="200", owner="team-a"} 1000
|
||||
limits{status="500", owner="team-b"} 50
|
||||
|
||||
# group_left with extra label and fill_right.
|
||||
# Note: when filling the "one" side, the joined label cannot be filled.
|
||||
eval instant at 0m requests + on(status) group_left(owner) fill_right(0) limits
|
||||
{method="GET", status="200", owner="team-a"} 1100
|
||||
{method="POST", status="200", owner="team-a"} 1200
|
||||
|
||||
# ---------- Edge cases ----------
|
||||
|
||||
clear
|
||||
|
||||
load 5m
|
||||
only_left{label="a"} 10
|
||||
only_left{label="b"} 20
|
||||
only_right{label="c"} 30
|
||||
only_right{label="d"} 40
|
||||
|
||||
# No overlap at all - fill creates all results.
|
||||
eval instant at 0m only_left + fill(0) only_right
|
||||
{label="a"} 10
|
||||
{label="b"} 20
|
||||
{label="c"} 30
|
||||
{label="d"} 40
|
||||
|
||||
# No overlap - fill_left only creates right side results.
|
||||
eval instant at 0m only_left + fill_left(0) only_right
|
||||
{label="c"} 30
|
||||
{label="d"} 40
|
||||
|
||||
# No overlap - fill_right only creates left side results.
|
||||
eval instant at 0m only_left + fill_right(0) only_right
|
||||
{label="a"} 10
|
||||
{label="b"} 20
|
||||
|
||||
# Complete overlap - fill has no effect.
|
||||
clear
|
||||
|
||||
load 5m
|
||||
complete_left{label="a"} 10
|
||||
complete_left{label="b"} 20
|
||||
complete_right{label="a"} 100
|
||||
complete_right{label="b"} 200
|
||||
|
||||
eval instant at 0m complete_left + fill(99) complete_right
|
||||
{label="a"} 110
|
||||
{label="b"} 220
|
||||
|
||||
# ---------- fill with range queries ----------
|
||||
|
||||
clear
|
||||
|
||||
load 5m
|
||||
range_left{label="a"} 1 2 3 4 5
|
||||
range_left{label="b"} 10 20 30 40 50
|
||||
range_right{label="a"} 100 200 300 400 500
|
||||
range_right{label="c"} 1000 2000 3000 4000 5000
|
||||
|
||||
eval range from 0 to 20m step 5m range_left + fill(0) range_right
|
||||
{label="a"} 101 202 303 404 505
|
||||
{label="b"} 10 20 30 40 50
|
||||
{label="c"} 1000 2000 3000 4000 5000
|
||||
|
||||
eval range from 0 to 20m step 5m range_left + fill_right(0) range_right
|
||||
{label="a"} 101 202 303 404 505
|
||||
{label="b"} 10 20 30 40 50
|
||||
|
||||
eval range from 0 to 20m step 5m range_left + fill_left(0) range_right
  {label="a"} 101 202 303 404 505
  {label="c"} 1000 2000 3000 4000 5000

# Range queries with intermittently present series.
clear

load 5m
  intermittent_left{label="a"} 1 _ 3 _ 5
  intermittent_left{label="b"} _ 20 _ 40 _
  intermittent_right{label="a"} _ 200 _ 400 _
  intermittent_right{label="b"} 100 _ 300 _ 500
  intermittent_right{label="c"} 1000 _ _ 4000 5000

# When both sides have the same label but are present at different times,
# fill creates results at all timestamps where at least one side is present.
eval range from 0 to 20m step 5m intermittent_left + fill(0) intermittent_right
  {label="a"} 1 200 3 400 5
  {label="b"} 100 20 300 40 500
  {label="c"} 1000 _ _ 4000 5000

# fill_right only fills the right side when it's missing.
# Output only exists when left side is present (right side filled with 0 if missing).
eval range from 0 to 20m step 5m intermittent_left + fill_right(0) intermittent_right
  {label="a"} 1 _ 3 _ 5
  {label="b"} _ 20 _ 40 _

# fill_left only fills the left side when it's missing.
# Output only exists when right side is present (left side filled with 0 if missing).
eval range from 0 to 20m step 5m intermittent_left + fill_left(0) intermittent_right
  {label="a"} _ 200 _ 400 _
  {label="b"} 100 _ 300 _ 500
  {label="c"} 1000 _ _ 4000 5000

# ---------- fill with vectors where one side is empty ----------

clear

load 5m
  non_empty{label="a"} 10
  non_empty{label="b"} 20

# Empty right side - fill_right has no effect (nothing to add).
eval instant at 0m non_empty + fill_right(0) nonexistent
  {label="a"} 10
  {label="b"} 20

# Empty right side - fill_left creates nothing (no right side labels to use).
eval instant at 0m non_empty + fill_left(0) nonexistent

# Empty left side - fill_left has no effect.
eval instant at 0m nonexistent + fill_left(0) non_empty
  {label="a"} 10
  {label="b"} 20

# Empty left side - fill_right creates nothing.
eval instant at 0m nonexistent + fill_right(0) non_empty

# fill both sides with one side empty.
eval instant at 0m non_empty + fill(0) nonexistent
  {label="a"} 10
  {label="b"} 20

eval instant at 0m nonexistent + fill(0) non_empty
  {label="a"} 10
  {label="b"} 20

# ---------- Metric names that match fill modifier keywords ----------

clear

load 5m
  fill{label="a"} 1
  fill{label="b"} 2
  fill_left{label="a"} 10
  fill_left{label="c"} 30
  fill_right{label="b"} 200
  fill_right{label="d"} 400
  other{label="a"} 1000
  other{label="e"} 5000

# Metric named "fill" on the left side.
eval instant at 0m fill + fill(0) other
  {label="a"} 1001
  {label="b"} 2
  {label="e"} 5000

# Metric named "fill" on the right side without modifier.
eval instant at 0m other + fill
  {label="a"} 1001

# Metric named "fill" on the right side with fill() modifier.
eval instant at 0m other + fill(0) fill
  {label="a"} 1001
  {label="b"} 2
  {label="e"} 5000

# Metric named "fill_left" on the right side with fill_left() modifier.
eval instant at 0m other + fill_left(0) fill_left
  {label="a"} 1010
  {label="c"} 30

# Metric named "fill_right" on the right side with fill_right() modifier.
eval instant at 0m other + fill_right(0) fill_right
  {label="a"} 1000
  {label="e"} 5000
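Not exercised in the test file above, but permitted by the grammar added later in this diff: fill_left() and fill_right() can be combined with different default values. A hypothetical sketch in the same test format (not part of the PR), assuming the intermittent_left/intermittent_right series from the range-query section are loaded; the expected values follow from the per-timestamp fill semantics demonstrated above:

eval range from 0 to 20m step 5m intermittent_left + fill_left(100) fill_right(0) intermittent_right
  {label="a"} 1 300 3 500 5
  {label="b"} 200 20 400 40 600
  {label="c"} 1100 _ _ 4100 5100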
@@ -120,10 +120,12 @@ func FuzzParseExpr(f *testing.F) {
parser.EnableExperimentalFunctions = true
parser.ExperimentalDurationExpr = true
parser.EnableExtendedRangeSelectors = true
parser.EnableBinopFillModifiers = true
f.Cleanup(func() {
parser.EnableExperimentalFunctions = false
parser.ExperimentalDurationExpr = false
parser.EnableExtendedRangeSelectors = false
parser.EnableBinopFillModifiers = false
})

// Add seed corpus from built-in test expressions
@@ -47,6 +47,10 @@ func translateAST(node parser.Expr) any {
"labels": sanitizeList(m.MatchingLabels),
"on": m.On,
"include": sanitizeList(m.Include),
"fillValues": map[string]*float64{
"lhs": m.FillValues.LHS,
"rhs": m.FillValues.RHS,
},
}
}
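For illustration only (not part of the diff): with the hunk above, a query such as foo + fill_left(23) fill_right(42) bar would arrive at the UI with a matching object of roughly the following shape; the field names come from the hunk, the surrounding fields from the VectorMatching interface extended further down in this diff.

// Hypothetical sketch of the serialized vector matching for
// foo + fill_left(23) fill_right(42) bar (not taken from the PR).
const matching = {
  card: "one-to-one",
  labels: [],
  on: false,
  include: [],
  fillValues: { lhs: 23, rhs: 42 },
};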
@@ -8,6 +8,7 @@ import {
MatchErrorType,
computeVectorVectorBinOp,
filteredSampleValue,
MaybeFilledInstantSample,
} from "../../../../promql/binOp";
import { formatNode, labelNameList } from "../../../../promql/format";
import {
@@ -177,11 +178,10 @@ const explanationText = (node: BinaryExpr): React.ReactNode => {
</List.Item>
) : (
<List.Item>
<span className="promql-code promql-keyword">
group_{manySide}({labelNameList(matching.include)})
</span>
: {matching.card} match. Each series from the {oneSide}-hand side is
allowed to match with multiple series from the {manySide}-hand side.
<span className="promql-code promql-keyword">group_{manySide}</span>
({labelNameList(matching.include)}) : {matching.card} match. Each
series from the {oneSide}-hand side is allowed to match with
multiple series from the {manySide}-hand side.
{matching.include.length !== 0 && (
<>
{" "}
@@ -192,6 +192,55 @@ const explanationText = (node: BinaryExpr): React.ReactNode => {
)}
</List.Item>
)}
{(matching.fillValues.lhs !== null ||
matching.fillValues.rhs !== null) &&
(matching.fillValues.lhs === matching.fillValues.rhs ? (
<List.Item>
<span className="promql-code promql-keyword">fill</span>(
<span className="promql-code promql-number">
{matching.fillValues.lhs}
</span>
) : For series on either side missing a match, fill in the sample
value{" "}
<span className="promql-code promql-number">
{matching.fillValues.lhs}
</span>
.
</List.Item>
) : (
<>
{matching.fillValues.lhs !== null && (
<List.Item>
<span className="promql-code promql-keyword">fill_left</span>(
<span className="promql-code promql-number">
{matching.fillValues.lhs}
</span>
) : For series on the left-hand side missing a match, fill in
the sample value{" "}
<span className="promql-code promql-number">
{matching.fillValues.lhs}
</span>
.
</List.Item>
)}

{matching.fillValues.rhs !== null && (
<List.Item>
<span className="promql-code promql-keyword">fill_right</span>
(
<span className="promql-code promql-number">
{matching.fillValues.rhs}
</span>
) : For series on the right-hand side missing a match, fill in
the sample value{" "}
<span className="promql-code promql-number">
{matching.fillValues.rhs}
</span>
.
</List.Item>
)}
</>
))}
{node.bool && (
<List.Item>
<span className="promql-code promql-keyword">bool</span>: Instead of
@@ -239,7 +288,12 @@ const explainError = (
matching: {
...(binOp.matching
? binOp.matching
: { labels: [], on: false, include: [] }),
: {
labels: [],
on: false,
include: [],
fillValues: { lhs: null, rhs: null },
}),
card:
err.dupeSide === "left"
? vectorMatchCardinality.manyToOne
@@ -403,7 +457,7 @@ const VectorVectorBinaryExprExplainView: FC<
);

const matchGroupTable = (
series: InstantSample[],
series: MaybeFilledInstantSample[],
seriesCount: number,
color: string,
colorOffset?: number
@@ -458,6 +512,11 @@ const VectorVectorBinaryExprExplainView: FC<
)}
format={true}
/>
{s.filled && (
<Text size="sm" c="dimmed">
no match, filling in default value
</Text>
)}
</Group>
</Table.Td>
{showSampleValues && (
@@ -104,11 +104,16 @@ export interface LabelMatcher {
value: string;
}

export interface FillValues {
lhs: number | null;
rhs: number | null;
}
export interface VectorMatching {
card: vectorMatchCardinality;
labels: string[];
on: boolean;
include: string[];
fillValues: FillValues;
}

export type StartOrEnd = "start" | "end" | null;
@ -81,6 +81,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -247,6 +248,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1", "label2"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -413,6 +415,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: ["same"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -579,6 +582,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricB,
|
||||
rhs: testMetricC,
|
||||
|
|
@ -701,6 +705,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricB,
|
||||
rhs: testMetricC,
|
||||
|
|
@ -791,6 +796,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricB,
|
||||
rhs: testMetricC,
|
||||
|
|
@ -905,6 +911,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricC,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -1019,6 +1026,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricC,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -1107,6 +1115,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -1223,6 +1232,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -1409,6 +1419,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -1596,6 +1607,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -1763,6 +1775,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -1929,6 +1942,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -2022,6 +2036,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricB,
|
||||
rhs: testMetricC,
|
||||
|
|
@ -2105,6 +2120,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricB,
|
||||
rhs: testMetricC,
|
||||
|
|
@ -2147,6 +2163,437 @@ const testCases: TestCase[] = [
|
|||
numGroups: 2,
|
||||
},
|
||||
},
|
||||
{
|
||||
// metric_a - fill(0) metric_b
|
||||
desc: "subtraction with fill(0) but no missing series",
|
||||
op: binaryOperatorType.sub,
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToOne,
|
||||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: 0, rhs: 0 },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
result: {
|
||||
groups: {
|
||||
[fnv1a(["a", "x", "same"])]: {
|
||||
groupLabels: { label1: "a", label2: "x", same: "same" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_a",
|
||||
label1: "a",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "1"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "a",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "10"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "a", label2: "x", same: "same" },
|
||||
value: [0, "-9"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
[fnv1a(["a", "y", "same"])]: {
|
||||
groupLabels: { label1: "a", label2: "y", same: "same" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_a",
|
||||
label1: "a",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "2"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "a",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "20"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "a", label2: "y", same: "same" },
|
||||
value: [0, "-18"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
[fnv1a(["b", "x", "same"])]: {
|
||||
groupLabels: { label1: "b", label2: "x", same: "same" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_a",
|
||||
label1: "b",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "3"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "b",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "30"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "b", label2: "x", same: "same" },
|
||||
value: [0, "-27"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
[fnv1a(["b", "y", "same"])]: {
|
||||
groupLabels: { label1: "b", label2: "y", same: "same" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_a",
|
||||
label1: "b",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "4"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "b",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "40"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "b", label2: "y", same: "same" },
|
||||
value: [0, "-36"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
},
|
||||
numGroups: 4,
|
||||
},
|
||||
},
|
||||
{
|
||||
// metric_a[0..2] - fill_left(23) fill_right(42) metric_b[1...3]
|
||||
desc: "subtraction with different fill values and missing series on each side",
|
||||
op: binaryOperatorType.sub,
|
||||
matching: {
|
||||
card: vectorMatchCardinality.oneToOne,
|
||||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: 23, rhs: 42 },
|
||||
},
|
||||
lhs: testMetricA.slice(0, 3),
|
||||
rhs: testMetricB.slice(1, 4),
|
||||
result: {
|
||||
groups: {
|
||||
[fnv1a(["a", "x", "same"])]: {
|
||||
groupLabels: { label1: "a", label2: "x", same: "same" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_a",
|
||||
label1: "a",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "1"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: {
|
||||
label1: "a",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "42"],
|
||||
filled: true,
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "a", label2: "x", same: "same" },
|
||||
value: [0, "-41"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
[fnv1a(["a", "y", "same"])]: {
|
||||
groupLabels: { label1: "a", label2: "y", same: "same" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_a",
|
||||
label1: "a",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "2"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "a",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "20"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "a", label2: "y", same: "same" },
|
||||
value: [0, "-18"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
[fnv1a(["b", "x", "same"])]: {
|
||||
groupLabels: { label1: "b", label2: "x", same: "same" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_a",
|
||||
label1: "b",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "3"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "b",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "30"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "b", label2: "x", same: "same" },
|
||||
value: [0, "-27"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
[fnv1a(["b", "y", "same"])]: {
|
||||
groupLabels: { label1: "b", label2: "y", same: "same" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
label1: "b",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
filled: true,
|
||||
value: [0, "23"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "b",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "40"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "b", label2: "y", same: "same" },
|
||||
value: [0, "-17"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
},
|
||||
numGroups: 4,
|
||||
},
|
||||
},
|
||||
{
|
||||
// metric_b[0...1] - on(label1) group_left fill(0) metric_c
|
||||
desc: "many-to-one matching with matching labels specified, group_left, and fill specified",
|
||||
op: binaryOperatorType.sub,
|
||||
matching: {
|
||||
card: vectorMatchCardinality.manyToOne,
|
||||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: 0, rhs: 0 },
|
||||
},
|
||||
lhs: testMetricB.slice(0, 2),
|
||||
rhs: testMetricC,
|
||||
result: {
|
||||
groups: {
|
||||
[fnv1a(["a"])]: {
|
||||
groupLabels: { label1: "a" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "a",
|
||||
label2: "x",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "10"],
|
||||
},
|
||||
{
|
||||
metric: {
|
||||
__name__: "metric_b",
|
||||
label1: "a",
|
||||
label2: "y",
|
||||
same: "same",
|
||||
},
|
||||
value: [0, "20"],
|
||||
},
|
||||
],
|
||||
lhsCount: 2,
|
||||
rhs: [
|
||||
{
|
||||
metric: { __name__: "metric_c", label1: "a" },
|
||||
value: [0, "100"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "a", label2: "x", same: "same" },
|
||||
value: [0, "-90"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "a", label2: "y", same: "same" },
|
||||
value: [0, "-80"],
|
||||
},
|
||||
manySideIdx: 1,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
[fnv1a(["b"])]: {
|
||||
groupLabels: { label1: "b" },
|
||||
lhs: [
|
||||
{
|
||||
metric: {
|
||||
label1: "b",
|
||||
},
|
||||
filled: true,
|
||||
value: [0, "0"],
|
||||
},
|
||||
],
|
||||
lhsCount: 1,
|
||||
rhs: [
|
||||
{
|
||||
metric: { __name__: "metric_c", label1: "b" },
|
||||
value: [0, "200"],
|
||||
},
|
||||
],
|
||||
rhsCount: 1,
|
||||
result: [
|
||||
{
|
||||
sample: {
|
||||
metric: { label1: "b" },
|
||||
value: [0, "-200"],
|
||||
},
|
||||
manySideIdx: 0,
|
||||
},
|
||||
],
|
||||
error: null,
|
||||
},
|
||||
},
|
||||
numGroups: 2,
|
||||
},
|
||||
},
|
||||
{
|
||||
// metric_a and metric b
|
||||
desc: "and operator with no matching labels and matching groups",
|
||||
|
|
@ -2156,6 +2603,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA,
|
||||
rhs: testMetricB,
|
||||
|
|
@ -2342,6 +2790,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA.slice(0, 3),
|
||||
rhs: testMetricB.slice(1, 4),
|
||||
|
|
@ -2474,6 +2923,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA.slice(0, 3),
|
||||
rhs: testMetricB.slice(1, 4),
|
||||
|
|
@ -2568,6 +3018,7 @@ const testCases: TestCase[] = [
|
|||
on: true,
|
||||
include: [],
|
||||
labels: ["label1"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA.slice(0, 3),
|
||||
rhs: testMetricB.slice(1, 4),
|
||||
|
|
@ -2700,6 +3151,7 @@ const testCases: TestCase[] = [
|
|||
on: false,
|
||||
include: [],
|
||||
labels: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
lhs: testMetricA.slice(0, 3),
|
||||
rhs: testMetricB.slice(1, 4),
|
||||
|
|
@ -2886,6 +3338,7 @@ describe("binOp", () => {
|
|||
on: true,
|
||||
labels: ["label1"],
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
};
|
||||
|
||||
const result = resultMetric(lhs, rhs, op, matching);
|
||||
|
|
@ -2911,6 +3364,7 @@ describe("binOp", () => {
|
|||
on: true,
|
||||
labels: ["label1"],
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
};
|
||||
|
||||
const result = resultMetric(lhs, rhs, op, matching);
|
||||
|
|
@ -2931,6 +3385,7 @@ describe("binOp", () => {
|
|||
on: true,
|
||||
labels: ["label1"],
|
||||
include: ["label2"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
};
|
||||
|
||||
const result = resultMetric(lhs, rhs, op, matching);
|
||||
|
|
|
|||
|
|
@@ -45,13 +45,18 @@ export type VectorMatchError =
| MultipleMatchesOnBothSidesError
| MultipleMatchesOnOneSideError;

export type MaybeFilledInstantSample = InstantSample & {
// If the sample was filled in via a fill(...) modifier, this is true.
filled?: boolean;
};

// A single match group as produced by a vector-to-vector binary operation, with all of its
// left-hand side and right-hand side series, as well as a result and error, if applicable.
export type BinOpMatchGroup = {
groupLabels: Metric;
rhs: InstantSample[];
rhs: MaybeFilledInstantSample[];
rhsCount: number; // Number of samples before applying limits.
lhs: InstantSample[];
lhs: MaybeFilledInstantSample[];
lhsCount: number; // Number of samples before applying limits.
result: {
sample: InstantSample;

@@ -338,6 +343,26 @@ export const computeVectorVectorBinOp = (
groups[sig].lhsCount++;
});

// Check for any LHS / RHS with no series and fill in default values, if specified.
Object.values(groups).forEach((mg) => {
if (mg.lhs.length === 0 && matching.fillValues.lhs !== null) {
mg.lhs.push({
metric: mg.groupLabels,
value: [0, formatPrometheusFloat(matching.fillValues.lhs as number)],
filled: true,
});
mg.lhsCount = 1;
}
if (mg.rhs.length === 0 && matching.fillValues.rhs !== null) {
mg.rhs.push({
metric: mg.groupLabels,
value: [0, formatPrometheusFloat(matching.fillValues.rhs as number)],
filled: true,
});
mg.rhsCount = 1;
}
});

// Annotate the match groups with errors (if any) and populate the results.
Object.values(groups).forEach((mg) => {
switch (matching.card) {
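As a concrete illustration (not part of the diff): for a query like metric_b - on(label1) group_left fill(0) metric_c, a match group whose left-hand side has no series would receive a single synthetic LHS sample shaped roughly as below, mirroring the push() calls above; the filled flag is what the explain view later uses to render its "no match, filling in default value" hint.

// Hypothetical filled-in placeholder sample (sketch, not from the PR).
const filledLhsSample = {
  metric: { label1: "b" },  // the group labels
  value: [0, "0"],          // the fill value, formatted as a Prometheus float string
  filled: true,             // marks the sample as synthesized by the fill modifier
};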
@@ -265,6 +265,7 @@ const formatNodeInternal = (
case nodeType.binaryExpr: {
let matching = <></>;
let grouping = <></>;
let fill = <></>;
const vm = node.matching;
if (vm !== null) {
if (

@@ -305,6 +306,45 @@ const formatNodeInternal = (
</>
);
}

const lfill = vm.fillValues.lhs;
const rfill = vm.fillValues.rhs;
if (lfill !== null || rfill !== null) {
if (lfill === rfill) {
fill = (
<>
{" "}
<span className="promql-keyword">fill</span>
<span className="promql-paren">(</span>
<span className="promql-number">{lfill}</span>
<span className="promql-paren">)</span>
</>
);
} else {
fill = (
<>
{lfill !== null && (
<>
{" "}
<span className="promql-keyword">fill_left</span>
<span className="promql-paren">(</span>
<span className="promql-number">{lfill}</span>
<span className="promql-paren">)</span>
</>
)}
{rfill !== null && (
<>
{" "}
<span className="promql-keyword">fill_right</span>
<span className="promql-paren">(</span>
<span className="promql-number">{rfill}</span>
<span className="promql-paren">)</span>
</>
)}
</>
);
}
}
}

return (

@@ -327,7 +367,8 @@ const formatNodeInternal = (
</>
)}
{matching}
{grouping}{" "}
{grouping}
{fill}{" "}
{showChildren &&
formatNode(
maybeParenthesizeBinopChild(node.op, node.rhs),
@@ -135,6 +135,7 @@ const serializeNode = (
case nodeType.binaryExpr: {
let matching = "";
let grouping = "";
let fill = "";
const vm = node.matching;
if (vm !== null) {
if (

@@ -152,11 +153,26 @@ const serializeNode = (
) {
grouping = ` group_${vm.card === vectorMatchCardinality.manyToOne ? "left" : "right"}(${labelNameList(vm.include)})`;
}

const lfill = vm.fillValues.lhs;
const rfill = vm.fillValues.rhs;
if (lfill !== null || rfill !== null) {
if (lfill === rfill) {
fill = ` fill(${lfill})`;
} else {
if (lfill !== null) {
fill += ` fill_left(${lfill})`;
}
if (rfill !== null) {
fill += ` fill_right(${rfill})`;
}
}
}
}

return `${serializeNode(maybeParenthesizeBinopChild(node.op, node.lhs), childIndent, pretty)}${childSeparator}${ind}${
node.op
}${node.bool ? " bool" : ""}${matching}${grouping}${childSeparator}${serializeNode(
}${node.bool ? " bool" : ""}${matching}${grouping}${fill}${childSeparator}${serializeNode(
maybeParenthesizeBinopChild(node.op, node.rhs),
childIndent,
pretty
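Illustration only (not part of the diff): with the branches above, the fill text is appended between the grouping modifier and the right-hand side when an expression is serialized back to a string, roughly along these lines.

// Hypothetical serializations (sketch based on the code above):
// foo + fill(0) bar                       ->  "foo + fill(0) bar"
// foo + fill_left(23) fill_right(42) bar  ->  "foo + fill_left(23) fill_right(42) bar"
// Equal lhs/rhs fill values collapse back into a single fill(...) clause.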
@ -658,6 +658,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -677,6 +678,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: [],
|
||||
on: true,
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -696,6 +698,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -715,6 +718,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: ["label1", "label2"],
|
||||
on: false,
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -735,6 +739,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -755,6 +760,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: [],
|
||||
on: false,
|
||||
include: ["__name__"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -774,6 +780,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -793,6 +800,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: ["label3"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -812,6 +820,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: [],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -831,6 +840,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: ["label3"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: false,
|
||||
},
|
||||
|
|
@ -864,6 +874,7 @@ describe("serializeNode and formatNode", () => {
|
|||
labels: ["label1", "label2"],
|
||||
on: true,
|
||||
include: ["label3"],
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
bool: true,
|
||||
},
|
||||
|
|
@ -911,6 +922,7 @@ describe("serializeNode and formatNode", () => {
|
|||
include: ["c", "ü"],
|
||||
labels: ["b", "ö"],
|
||||
on: true,
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
op: binaryOperatorType.div,
|
||||
rhs: {
|
||||
|
|
@ -948,6 +960,7 @@ describe("serializeNode and formatNode", () => {
|
|||
include: [],
|
||||
labels: ["e", "ö"],
|
||||
on: false,
|
||||
fillValues: { lhs: null, rhs: null },
|
||||
},
|
||||
op: binaryOperatorType.add,
|
||||
rhs: {
|
||||
|
|
|
|||
|
|
@@ -39,6 +39,10 @@ export const binOpModifierTerms = [
{ label: 'ignoring', info: 'Ignore specified labels for matching', type: 'keyword' },
{ label: 'group_left', info: 'Allow many-to-one matching', type: 'keyword' },
{ label: 'group_right', info: 'Allow one-to-many matching', type: 'keyword' },
{ label: 'bool', info: 'Return boolean result (0 or 1) instead of filtering', type: 'keyword' },
{ label: 'fill', info: 'Fill in missing series on both sides', type: 'keyword' },
{ label: 'fill_left', info: 'Fill in missing series on the left side', type: 'keyword' },
{ label: 'fill_right', info: 'Fill in missing series on the right side', type: 'keyword' },
];

export const atModifierTerms = [
|||
|
|
@ -15,29 +15,31 @@ import { buildVectorMatching } from './vector';
|
|||
import { createEditorState } from '../test/utils-test';
|
||||
import { BinaryExpr } from '@prometheus-io/lezer-promql';
|
||||
import { syntaxTree } from '@codemirror/language';
|
||||
import { VectorMatchCardinality } from '../types';
|
||||
import { VectorMatchCardinality, VectorMatching } from '../types';
|
||||
|
||||
const noFill = { fill: { lhs: null, rhs: null } };
|
||||
|
||||
describe('buildVectorMatching test', () => {
|
||||
const testCases = [
|
||||
const testCases: { binaryExpr: string; expectedVectorMatching: VectorMatching }[] = [
|
||||
{
|
||||
binaryExpr: 'foo * bar',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill },
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo * sum',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill },
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo == 1',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill },
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo == bool 1',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill },
|
||||
},
|
||||
{
|
||||
binaryExpr: '2.5 / bar',
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [] },
|
||||
expectedVectorMatching: { card: VectorMatchCardinality.CardOneToOne, matchingLabels: [], on: false, include: [], ...noFill },
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo and bar',
|
||||
|
|
@ -46,6 +48,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -55,6 +58,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -64,6 +68,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -75,6 +80,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -86,6 +92,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -95,6 +102,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -104,6 +112,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -113,6 +122,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -122,6 +132,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: [],
|
||||
on: true,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -131,6 +142,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: false,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -140,6 +152,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -149,6 +162,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['bar'],
|
||||
on: true,
|
||||
include: [],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -158,6 +172,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: ['bar'],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -167,6 +182,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: false,
|
||||
include: ['blub'],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -176,6 +192,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: false,
|
||||
include: ['bar'],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -185,6 +202,7 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: true,
|
||||
include: ['bar', 'foo'],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -194,6 +212,57 @@ describe('buildVectorMatching test', () => {
|
|||
matchingLabels: ['test', 'blub'],
|
||||
on: false,
|
||||
include: ['bar', 'foo'],
|
||||
...noFill,
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo + fill(23) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardOneToOne,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
fill: { lhs: 23, rhs: 23 },
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo + fill_left(23) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardOneToOne,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
fill: { lhs: 23, rhs: null },
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo + fill_right(23) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardOneToOne,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
fill: { lhs: null, rhs: 23 },
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo + fill_left(23) fill_right(42) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardOneToOne,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
fill: { lhs: 23, rhs: 42 },
|
||||
},
|
||||
},
|
||||
{
|
||||
binaryExpr: 'foo + fill_right(23) fill_left(42) bar',
|
||||
expectedVectorMatching: {
|
||||
card: VectorMatchCardinality.CardOneToOne,
|
||||
matchingLabels: [],
|
||||
on: false,
|
||||
include: [],
|
||||
fill: { lhs: 42, rhs: 23 },
|
||||
},
|
||||
},
|
||||
];
|
||||
|
|
@ -203,7 +272,7 @@ describe('buildVectorMatching test', () => {
|
|||
const node = syntaxTree(state).topNode.getChild(BinaryExpr);
|
||||
expect(node).toBeTruthy();
|
||||
if (node) {
|
||||
expect(value.expectedVectorMatching).toEqual(buildVectorMatching(state, node));
|
||||
expect(buildVectorMatching(state, node)).toEqual(value.expectedVectorMatching);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -24,6 +24,11 @@ import {
On,
Or,
Unless,
NumberDurationLiteral,
FillModifier,
FillClause,
FillLeftClause,
FillRightClause,
} from '@prometheus-io/lezer-promql';
import { VectorMatchCardinality, VectorMatching } from '../types';
import { containsAtLeastOneChild } from './path-finder';

@@ -37,6 +42,10 @@ export function buildVectorMatching(state: EditorState, binaryNode: SyntaxNode):
matchingLabels: [],
on: false,
include: [],
fill: {
lhs: null,
rhs: null,
},
};
const modifierClause = binaryNode.getChild(MatchingModifierClause);
if (modifierClause) {

@@ -60,6 +69,32 @@ export function buildVectorMatching(state: EditorState, binaryNode: SyntaxNode):
}
}

const fillModifier = binaryNode.getChild(FillModifier);
if (fillModifier) {
const fill = fillModifier.getChild(FillClause);
const fillLeft = fillModifier.getChild(FillLeftClause);
const fillRight = fillModifier.getChild(FillRightClause);

const getFillValue = (node: SyntaxNode) => {
const valueNode = node.getChild(NumberDurationLiteral);
return valueNode ? parseFloat(state.sliceDoc(valueNode.from, valueNode.to)) : null;
};

if (fill) {
const value = getFillValue(fill);
result.fill.lhs = value;
result.fill.rhs = value;
}

if (fillLeft) {
result.fill.lhs = getFillValue(fillLeft);
}

if (fillRight) {
result.fill.rhs = getFillValue(fillRight);
}
}

const isSetOperator = containsAtLeastOneChild(binaryNode, And, Or, Unless);
if (isSetOperator && result.card === VectorMatchCardinality.CardOneToOne) {
result.card = VectorMatchCardinality.CardManyToMany;
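For example (illustration only, mirroring the parsing logic above): the two one-sided clauses may appear in either order, and a bare fill() sets both sides.

// Hypothetical results of buildVectorMatching (sketch, not from the PR):
// 'foo + fill(5) bar'                       ->  fill: { lhs: 5, rhs: 5 }
// 'foo + fill_left(23) bar'                 ->  fill: { lhs: 23, rhs: null }
// 'foo + fill_right(23) fill_left(42) bar'  ->  fill: { lhs: 42, rhs: 23 }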
@@ -18,6 +18,11 @@ export enum VectorMatchCardinality {
CardManyToMany = 'many-to-many',
}

export interface FillValues {
lhs: number | null;
rhs: number | null;
}

export interface VectorMatching {
// The cardinality of the two Vectors.
card: VectorMatchCardinality;

@@ -30,4 +35,6 @@ export interface VectorMatching {
// Include contains additional labels that should be included in
// the result from the side with the lower cardinality.
include: string[];
// Fill contains optional fill values for missing elements.
fill: FillValues;
}
@@ -101,11 +101,30 @@ MatchingModifierClause {
((GroupLeft | GroupRight) (!group GroupingLabels)?)?
}

FillClause {
Fill "(" NumberDurationLiteral ")"
}

FillLeftClause {
FillLeft "(" NumberDurationLiteral ")"
}

FillRightClause {
FillRight "(" NumberDurationLiteral ")"
}

FillModifier {
(FillClause | FillLeftClause | FillRightClause) |
(FillLeftClause FillRightClause) |
(FillRightClause FillLeftClause)
}

BoolModifier { Bool }

binModifiers {
BoolModifier?
MatchingModifierClause?
FillModifier?
}

GroupingLabels {

@@ -366,7 +385,10 @@ NumberDurationLiteralInDurationContext {
Start,
End,
Smoothed,
Anchored
Anchored,
Fill,
FillLeft,
FillRight
}

@external propSource promQLHighLight from "./highlight"
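Reading the FillModifier rule together with binModifiers above, these expression shapes parse (a sketch, not taken from the diff):

foo + fill(0) bar
foo + fill_left(1) bar
foo + fill_left(1) fill_right(2) bar
foo + fill_right(2) fill_left(1) bar
foo + on(job) group_left fill(0) bar

The rule does not produce fill() combined with fill_left()/fill_right() in the same expression, and the fill modifier comes after any bool and matching/grouping modifiers.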
|||
|
|
@ -12,82 +12,88 @@
|
|||
// limitations under the License.
|
||||
|
||||
import {
|
||||
And,
|
||||
Avg,
|
||||
Atan2,
|
||||
Bool,
|
||||
Bottomk,
|
||||
By,
|
||||
Count,
|
||||
CountValues,
|
||||
End,
|
||||
Group,
|
||||
GroupLeft,
|
||||
GroupRight,
|
||||
Ignoring,
|
||||
inf,
|
||||
Max,
|
||||
Min,
|
||||
nan,
|
||||
Offset,
|
||||
On,
|
||||
Or,
|
||||
Quantile,
|
||||
LimitK,
|
||||
LimitRatio,
|
||||
Start,
|
||||
Stddev,
|
||||
Stdvar,
|
||||
Sum,
|
||||
Topk,
|
||||
Unless,
|
||||
Without,
|
||||
Smoothed,
|
||||
Anchored,
|
||||
} from './parser.terms.js';
|
||||
And,
|
||||
Avg,
|
||||
Atan2,
|
||||
Bool,
|
||||
Bottomk,
|
||||
By,
|
||||
Count,
|
||||
CountValues,
|
||||
End,
|
||||
Group,
|
||||
GroupLeft,
|
||||
GroupRight,
|
||||
Ignoring,
|
||||
inf,
|
||||
Max,
|
||||
Min,
|
||||
nan,
|
||||
Offset,
|
||||
On,
|
||||
Or,
|
||||
Quantile,
|
||||
LimitK,
|
||||
LimitRatio,
|
||||
Start,
|
||||
Stddev,
|
||||
Stdvar,
|
||||
Sum,
|
||||
Topk,
|
||||
Unless,
|
||||
Without,
|
||||
Smoothed,
|
||||
Anchored,
|
||||
Fill,
|
||||
FillLeft,
|
||||
FillRight,
|
||||
} from "./parser.terms.js";
|
||||
|
||||
const keywordTokens = {
|
||||
inf: inf,
|
||||
nan: nan,
|
||||
bool: Bool,
|
||||
ignoring: Ignoring,
|
||||
on: On,
|
||||
group_left: GroupLeft,
|
||||
group_right: GroupRight,
|
||||
offset: Offset,
|
||||
inf: inf,
|
||||
nan: nan,
|
||||
bool: Bool,
|
||||
ignoring: Ignoring,
|
||||
on: On,
|
||||
group_left: GroupLeft,
|
||||
group_right: GroupRight,
|
||||
offset: Offset,
|
||||
};
|
||||
|
||||
export const specializeIdentifier = (value, stack) => {
|
||||
return keywordTokens[value.toLowerCase()] || -1;
|
||||
return keywordTokens[value.toLowerCase()] || -1;
|
||||
};
|
||||
|
||||
const contextualKeywordTokens = {
|
||||
avg: Avg,
|
||||
atan2: Atan2,
|
||||
bottomk: Bottomk,
|
||||
count: Count,
|
||||
count_values: CountValues,
|
||||
group: Group,
|
||||
max: Max,
|
||||
min: Min,
|
||||
quantile: Quantile,
|
||||
limitk: LimitK,
|
||||
limit_ratio: LimitRatio,
|
||||
stddev: Stddev,
|
||||
stdvar: Stdvar,
|
||||
sum: Sum,
|
||||
topk: Topk,
|
||||
by: By,
|
||||
without: Without,
|
||||
and: And,
|
||||
or: Or,
|
||||
unless: Unless,
|
||||
start: Start,
|
||||
end: End,
|
||||
smoothed: Smoothed,
|
||||
anchored: Anchored,
|
||||
avg: Avg,
|
||||
atan2: Atan2,
|
||||
bottomk: Bottomk,
|
||||
count: Count,
|
||||
count_values: CountValues,
|
||||
group: Group,
|
||||
max: Max,
|
||||
min: Min,
|
||||
quantile: Quantile,
|
||||
limitk: LimitK,
|
||||
limit_ratio: LimitRatio,
|
||||
stddev: Stddev,
|
||||
stdvar: Stdvar,
|
||||
sum: Sum,
|
||||
topk: Topk,
|
||||
by: By,
|
||||
without: Without,
|
||||
and: And,
|
||||
or: Or,
|
||||
unless: Unless,
|
||||
start: Start,
|
||||
end: End,
|
||||
smoothed: Smoothed,
|
||||
anchored: Anchored,
|
||||
fill: Fill,
|
||||
fill_left: FillLeft,
|
||||
fill_right: FillRight,
|
||||
};
|
||||
|
||||
export const extendIdentifier = (value, stack) => {
|
||||
return contextualKeywordTokens[value.toLowerCase()] || -1;
|
||||
return contextualKeywordTokens[value.toLowerCase()] || -1;
|
||||
};
|
||||
|
|