Elasticsearch: Decoupling from core (#115900)

* Complete decoupling of backend

- Replace usage of featuremgmt
- Copy simplejson
- Add standalone logic

* Complete frontend decoupling

- Fix imports
- Copy store and reducer logic

* Add required files for full decoupling

* Regen cue

* Prettier

* Remove unneeded script

* Jest fix

* Add jest config

* Lint

* Lint

* Prune suppressions
This commit is contained in:
Andreas Christou 2026-01-14 13:54:21 +01:00 committed by GitHub
parent 7143324229
commit c1a46fdcb5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
64 changed files with 1378 additions and 128 deletions

View file

@ -121,6 +121,8 @@ linters:
- '**/pkg/tsdb/zipkin/**/*'
- '**/pkg/tsdb/jaeger/*'
- '**/pkg/tsdb/jaeger/**/*'
- '**/pkg/tsdb/elasticsearch/*'
- '**/pkg/tsdb/elasticsearch/**/*'
deny:
- pkg: github.com/grafana/grafana/pkg/api
desc: Core plugins are not allowed to depend on Grafana core packages

View file

@ -3743,46 +3743,21 @@
"count": 1
}
},
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/DateHistogramSettingsEditor.tsx": {
"@typescript-eslint/consistent-type-assertions": {
"count": 1
}
},
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/TermsSettingsEditor.tsx": {
"@typescript-eslint/consistent-type-assertions": {
"count": 1
}
},
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/aggregations.ts": {
"@typescript-eslint/consistent-type-assertions": {
"count": 1
}
},
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/state/reducer.ts": {
"@typescript-eslint/consistent-type-assertions": {
"count": 1
}
},
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.tsx": {
"@typescript-eslint/consistent-type-assertions": {
"count": 1
}
},
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/SettingsEditor/SettingField.tsx": {
"@typescript-eslint/consistent-type-assertions": {
"count": 2
}
},
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/aggregations.ts": {
"@typescript-eslint/consistent-type-assertions": {
"count": 1
}
},
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/state/reducer.ts": {
"@typescript-eslint/consistent-type-assertions": {
"count": 1
}
},
"public/app/plugins/datasource/elasticsearch/configuration/DataLinks.tsx": {
"no-restricted-syntax": {
"count": 1

View file

@ -82,6 +82,7 @@ module.exports = {
// Decoupled plugins run their own tests so ignoring them here.
'<rootDir>/public/app/plugins/datasource/azuremonitor',
'<rootDir>/public/app/plugins/datasource/cloud-monitoring',
'<rootDir>/public/app/plugins/datasource/elasticsearch',
'<rootDir>/public/app/plugins/datasource/grafana-postgresql-datasource',
'<rootDir>/public/app/plugins/datasource/grafana-pyroscope-datasource',
'<rootDir>/public/app/plugins/datasource/grafana-testdata-datasource',

View file

@ -10,7 +10,7 @@
import * as common from '@grafana/schema';
export const pluginVersion = "12.4.0-pre";
export const pluginVersion = "%VERSION%";
export type BucketAggregation = (DateHistogram | Histogram | Terms | Filters | GeoHashGrid | Nested);

View file

@ -639,7 +639,7 @@
]
},
"dependencies": {
"grafanaDependency": "",
"grafanaDependency": "\u003e=11.6.0",
"grafanaVersion": "*",
"plugins": [],
"extensions": {

View file

@ -3,8 +3,8 @@ package elasticsearch
import (
"regexp"
"github.com/grafana/grafana/pkg/components/simplejson"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
// addDateHistogramAgg adds a date histogram aggregation to the aggregation builder

View file

@ -16,7 +16,6 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt"
)
// Used in logging to mark a stage
@ -160,7 +159,7 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch
resSpan.End()
}()
improvedParsingEnabled := isFeatureEnabled(c.ctx, featuremgmt.FlagElasticsearchImprovedParsing)
improvedParsingEnabled := isFeatureEnabled(c.ctx, "elasticsearchImprovedParsing")
msr, err := c.parser.parseMultiSearchResponse(res.Body, improvedParsingEnabled)
if err != nil {
return nil, err

View file

@ -15,7 +15,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
func TestClient_ExecuteMultisearch(t *testing.T) {

View file

@ -8,7 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
func TestSearchRequest(t *testing.T) {

View file

@ -6,8 +6,8 @@ import (
"strconv"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
// processQuery processes a single query and adds it to the multi-search request builder

View file

@ -3,7 +3,7 @@ package elasticsearch
import (
"strconv"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
// setFloatPath converts a string value at the specified path to float64

View file

@ -9,7 +9,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
// metricsResponseProcessor handles processing of metrics query responses

View file

@ -4,7 +4,7 @@ import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
// Query represents the time series query model of the datasource

View file

@ -6,7 +6,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
func parseQuery(tsdbQuery []backend.DataQuery, logger log.Logger) ([]*Query, error) {

View file

@ -5,7 +5,7 @@ import (
"fmt"
"strconv"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
// AggregationParser parses raw Elasticsearch DSL aggregations

View file

@ -15,9 +15,9 @@ import (
"go.opentelemetry.io/otel/codes"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/components/simplejson"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/instrumentation"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
const (

View file

@ -7,8 +7,8 @@ import (
"strings"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
)
// flatten flattens multi-level objects to single level objects. It uses dot notation to join keys.

View file

@ -0,0 +1,582 @@
// Package simplejson provides a wrapper for arbitrary JSON objects that adds methods to access properties.
// Use of this package in place of types and the standard library's encoding/json package is strongly discouraged.
//
// Don't lint for stale code, since it's a copied library and we might as well keep the whole thing.
// nolint:unused
package simplejson
import (
"bytes"
"database/sql/driver"
"encoding/json"
"errors"
"fmt"
"log"
)
// Version reports the version of the simplejson implementation this
// package was copied from.
func Version() string {
	const implementationVersion = "0.5.0"
	return implementationVersion
}
// Json wraps an arbitrary decoded JSON value (object map, array slice,
// string, number, bool or nil) and provides typed accessor methods.
type Json struct {
	data any
}
// FromDB decodes raw database bytes into the object, resetting any previous
// content. UseNumber preserves numeric precision (values decode as
// json.Number instead of float64). NOTE(review): FromDB/ToDB look like an
// ORM (xorm-style) serialization hook pair — confirm against callers.
func (j *Json) FromDB(data []byte) error {
	j.data = make(map[string]any)
	dec := json.NewDecoder(bytes.NewBuffer(data))
	dec.UseNumber()
	return dec.Decode(&j.data)
}
// ToDB serializes the object for database storage. A nil receiver or empty
// payload yields (nil, nil), i.e. it is stored as NULL rather than "{}".
func (j *Json) ToDB() ([]byte, error) {
	if j == nil || j.data == nil {
		return nil, nil
	}
	return j.Encode()
}
// Scan implements sql.Scanner so a Json column can be populated directly
// from a database row. Empty []byte/string input is treated as "no data"
// and leaves the receiver untouched; other Go types are rejected.
func (j *Json) Scan(val any) error {
	switch v := val.(type) {
	case []byte:
		if len(v) == 0 {
			return nil
		}
		// &j is **Json; encoding/json follows the pointer chain and invokes
		// (*Json).UnmarshalJSON, so json.Number handling applies here too.
		return json.Unmarshal(v, &j)
	case string:
		if len(v) == 0 {
			return nil
		}
		return json.Unmarshal([]byte(v), &j)
	default:
		return fmt.Errorf("unsupported type: %T", v)
	}
}
// Value implements driver.Valuer by delegating to ToDB (NULL when empty).
func (j *Json) Value() (driver.Value, error) {
	return j.ToDB()
}
// DeepCopyInto creates a copy by serializing JSON.
// NOTE(review): encode/unmarshal failures are silently ignored, which
// leaves `out` unchanged with no signal to the caller.
func (j *Json) DeepCopyInto(out *Json) {
	b, err := j.Encode()
	if err == nil {
		_ = out.UnmarshalJSON(b)
	}
}
// DeepCopy returns an independent copy of the JSON object made via a
// serialize/deserialize round trip; a nil receiver yields nil.
func (j *Json) DeepCopy() *Json {
	if j == nil {
		return nil
	}
	clone := &Json{}
	j.DeepCopyInto(clone)
	return clone
}
// NewJson returns a pointer to a new `Json` object after unmarshaling
// `body` bytes, or an error when the bytes are not valid JSON.
func NewJson(body []byte) (*Json, error) {
	j := new(Json)
	if err := j.UnmarshalJSON(body); err != nil {
		return nil, err
	}
	return j, nil
}
// MustJson returns a pointer to a new `Json` object, panicking if `body`
// cannot be parsed. Intended for static fixtures and tests only.
func MustJson(body []byte) *Json {
	j, err := NewJson(body)
	if err != nil {
		panic(fmt.Sprintf("could not unmarshal JSON: %q", err))
	}
	return j
}
// New returns a pointer to a new, empty `Json` object (an empty JSON object,
// not JSON null).
func New() *Json {
	return &Json{
		data: make(map[string]any),
	}
}
// NewFromAny returns a pointer to a new `Json` object with provided data.
// The value is stored as-is; no check is made that it is JSON-serializable.
func NewFromAny(data any) *Json {
	return &Json{data: data}
}
// Interface returns the underlying data without copying.
func (j *Json) Interface() any {
	return j.data
}
// Encode returns its marshaled data as `[]byte` (alias for MarshalJSON).
func (j *Json) Encode() ([]byte, error) {
	return j.MarshalJSON()
}
// EncodePretty returns its marshaled data as `[]byte` with two-space
// indentation.
func (j *Json) EncodePretty() ([]byte, error) {
	return json.MarshalIndent(&j.data, "", "  ")
}
// MarshalJSON implements the json.Marshaler interface.
func (j *Json) MarshalJSON() ([]byte, error) {
	return json.Marshal(&j.data)
}
// Set writes `val` under `key` in the object's map representation.
// It is a silent no-op when the underlying value is not a JSON object.
func (j *Json) Set(key string, val any) {
	if m, err := j.Map(); err == nil {
		m[key] = val
	}
}
// SetPath modifies `Json`, recursively checking/creating map keys for the
// supplied path, and then finally writing in the value. Non-object values
// encountered anywhere along the path (including the root) are destructively
// replaced with fresh maps. An empty branch replaces the root value itself.
func (j *Json) SetPath(branch []string, val any) {
	if len(branch) == 0 {
		j.data = val
		return
	}
	// in order to insert our branch, we need map[string]any
	if _, ok := (j.data).(map[string]any); !ok {
		// have to replace with something suitable
		j.data = make(map[string]any)
	}
	curr := j.data.(map[string]any)
	for i := 0; i < len(branch)-1; i++ {
		b := branch[i]
		// key exists?
		if _, ok := curr[b]; !ok {
			n := make(map[string]any)
			curr[b] = n
			curr = n
			continue
		}
		// make sure the value is the right sort of thing
		if _, ok := curr[b].(map[string]any); !ok {
			// have to replace with something suitable
			n := make(map[string]any)
			curr[b] = n
		}
		curr = curr[b].(map[string]any)
	}
	// add remaining k/v
	curr[branch[len(branch)-1]] = val
}
// Del removes `key` from the object's map representation; a silent no-op
// when the underlying value is not a JSON object or the key is absent.
func (j *Json) Del(key string) {
	if m, err := j.Map(); err == nil {
		delete(m, key)
	}
}
// Get returns a pointer to a new `Json` object
// for `key` in its `map` representation
//
// useful for chaining operations (to traverse a nested JSON):
//
//	js.Get("top_level").Get("dict").Get("value").Int()
//
// A missing key (or a non-object receiver) yields a Json wrapping nil, so
// chained calls never panic — subsequent typed accessors simply fail.
func (j *Json) Get(key string) *Json {
	m, err := j.Map()
	if err == nil {
		if val, ok := m[key]; ok {
			return &Json{val}
		}
	}
	return &Json{nil}
}
// GetPath searches for the item as specified by the branch without the
// need to deep dive using Get()'s, e.g. js.GetPath("top_level", "dict").
func (j *Json) GetPath(branch ...string) *Json {
	node := j
	for _, key := range branch {
		node = node.Get(key)
	}
	return node
}
// GetIndex returns a pointer to a new `Json` object
// for `index` in its `array` representation
//
// this is the analog to Get when accessing elements of
// a json array instead of a json object:
//
//	js.Get("top_level").Get("array").GetIndex(1).Get("key").Int()
//
// An out-of-range index or non-array receiver yields a Json wrapping nil.
func (j *Json) GetIndex(index int) *Json {
	a, err := j.Array()
	if err == nil {
		if len(a) > index {
			return &Json{a[index]}
		}
	}
	return &Json{nil}
}
// CheckGetIndex returns a pointer to a new `Json` object
// for `index` in its `array` representation, and a `bool`
// indicating success or failure
//
// useful for chained operations when success is important:
//
//	if data, ok := js.Get("top_level").CheckGetIndex(0); ok {
//	    log.Println(data)
//	}
func (j *Json) CheckGetIndex(index int) (*Json, bool) {
	a, err := j.Array()
	if err == nil {
		if len(a) > index {
			return &Json{a[index]}, true
		}
	}
	return nil, false
}
// SetIndex modifies `Json` array by `index` and `value`
// for `index` in its `array` representation.
// NOTE(review): it cannot grow the array — an out-of-range index or a
// non-array receiver is a silent no-op.
func (j *Json) SetIndex(index int, val any) {
	a, err := j.Array()
	if err == nil {
		if len(a) > index {
			a[index] = val
		}
	}
}
// CheckGet returns a pointer to a new `Json` object for `key` and a `bool`
// reporting whether the key was present in a JSON object:
//
//	if data, ok := js.Get("top_level").CheckGet("inner"); ok {
//	    log.Println(data)
//	}
func (j *Json) CheckGet(key string) (*Json, bool) {
	m, err := j.Map()
	if err != nil {
		return nil, false
	}
	val, ok := m[key]
	if !ok {
		return nil, false
	}
	return &Json{val}, true
}
// Map type asserts to `map`, returning an error when the underlying value
// is not a JSON object.
func (j *Json) Map() (map[string]any, error) {
	if m, ok := (j.data).(map[string]any); ok {
		return m, nil
	}
	return nil, errors.New("type assertion to map[string]any failed")
}
// Array type asserts to an `array`, returning an error when the underlying
// value is not a JSON array.
func (j *Json) Array() ([]any, error) {
	if a, ok := (j.data).([]any); ok {
		return a, nil
	}
	return nil, errors.New("type assertion to []any failed")
}
// Bool type asserts to `bool`; no coercion from strings or numbers.
func (j *Json) Bool() (bool, error) {
	if s, ok := (j.data).(bool); ok {
		return s, nil
	}
	return false, errors.New("type assertion to bool failed")
}
// String type asserts to `string`; numbers and other types are not coerced.
func (j *Json) String() (string, error) {
	if s, ok := (j.data).(string); ok {
		return s, nil
	}
	return "", errors.New("type assertion to string failed")
}
// Bytes type asserts to `[]byte` by converting the underlying string; fails
// for any non-string value.
func (j *Json) Bytes() ([]byte, error) {
	if s, ok := (j.data).(string); ok {
		return []byte(s), nil
	}
	return nil, errors.New("type assertion to []byte failed")
}
// StringArray type asserts to an `array` of `string`. JSON nulls map to
// empty strings; any other non-string element is reported as an error.
func (j *Json) StringArray() ([]string, error) {
	arr, err := j.Array()
	if err != nil {
		return nil, err
	}
	retArr := make([]string, 0, len(arr))
	for _, a := range arr {
		if a == nil {
			retArr = append(retArr, "")
			continue
		}
		s, ok := a.(string)
		if !ok {
			// Bug fix: this previously returned `nil, err` where err was
			// always nil at this point, so a mixed-type array looked like
			// success with a nil slice. Report a real error instead.
			return nil, errors.New("type assertion to string failed")
		}
		retArr = append(retArr, s)
	}
	return retArr, nil
}
// MustArray guarantees the return of a `[]any` (with optional default)
//
// useful when you want to iterate over array values in a succinct manner:
//
//	for i, v := range js.Get("results").MustArray() {
//	    fmt.Println(i, v)
//	}
//
// Panics (log.Panicf) when more than one default is supplied.
func (j *Json) MustArray(args ...[]any) []any {
	var def []any
	switch len(args) {
	case 0:
	case 1:
		def = args[0]
	default:
		log.Panicf("MustArray() received too many arguments %d", len(args))
	}
	a, err := j.Array()
	if err == nil {
		return a
	}
	return def
}
// MustMap guarantees the return of a `map[string]any` (with optional default)
//
// useful when you want to iterate over map values in a succinct manner:
//
//	for k, v := range js.Get("dictionary").MustMap() {
//	    fmt.Println(k, v)
//	}
//
// Panics (log.Panicf) when more than one default is supplied.
func (j *Json) MustMap(args ...map[string]any) map[string]any {
	var def map[string]any
	switch len(args) {
	case 0:
	case 1:
		def = args[0]
	default:
		log.Panicf("MustMap() received too many arguments %d", len(args))
	}
	a, err := j.Map()
	if err == nil {
		return a
	}
	return def
}
// MustString guarantees the return of a `string` (with optional default)
//
// useful when you explicitly want a `string` in a single value return context:
//
//	myFunc(js.Get("param1").MustString(), js.Get("optional_param").MustString("my_default"))
func (j *Json) MustString(args ...string) string {
	if len(args) > 1 {
		log.Panicf("MustString() received too many arguments %d", len(args))
	}
	fallback := ""
	if len(args) == 1 {
		fallback = args[0]
	}
	if s, err := j.String(); err == nil {
		return s
	}
	return fallback
}
// MustStringArray guarantees the return of a `[]string` (with optional default)
//
// useful when you want to iterate over array values in a succinct manner:
//
//	for i, s := range js.Get("results").MustStringArray() {
//	    fmt.Println(i, s)
//	}
//
// Panics (log.Panicf) when more than one default is supplied.
func (j *Json) MustStringArray(args ...[]string) []string {
	var def []string
	switch len(args) {
	case 0:
	case 1:
		def = args[0]
	default:
		log.Panicf("MustStringArray() received too many arguments %d", len(args))
	}
	a, err := j.StringArray()
	if err == nil {
		return a
	}
	return def
}
// MustInt guarantees the return of an `int` (with optional default)
//
// useful when you explicitly want an `int` in a single value return context:
//
//	myFunc(js.Get("param1").MustInt(), js.Get("optional_param").MustInt(5150))
//
// Panics (log.Panicf) when more than one default is supplied.
func (j *Json) MustInt(args ...int) int {
	var def int
	switch len(args) {
	case 0:
	case 1:
		def = args[0]
	default:
		log.Panicf("MustInt() received too many arguments %d", len(args))
	}
	i, err := j.Int()
	if err == nil {
		return i
	}
	return def
}
// MustFloat64 guarantees the return of a `float64` (with optional default)
//
// useful when you explicitly want a `float64` in a single value return context:
//
//	myFunc(js.Get("param1").MustFloat64(), js.Get("optional_param").MustFloat64(5.150))
//
// Panics (log.Panicf) when more than one default is supplied.
func (j *Json) MustFloat64(args ...float64) float64 {
	var def float64
	switch len(args) {
	case 0:
	case 1:
		def = args[0]
	default:
		log.Panicf("MustFloat64() received too many arguments %d", len(args))
	}
	f, err := j.Float64()
	if err == nil {
		return f
	}
	return def
}
// MustBool guarantees the return of a `bool` (with optional default)
//
// useful when you explicitly want a `bool` in a single value return context:
//
//	myFunc(js.Get("param1").MustBool(), js.Get("optional_param").MustBool(true))
//
// Panics (log.Panicf) when more than one default is supplied.
func (j *Json) MustBool(args ...bool) bool {
	var def bool
	switch len(args) {
	case 0:
	case 1:
		def = args[0]
	default:
		log.Panicf("MustBool() received too many arguments %d", len(args))
	}
	b, err := j.Bool()
	if err == nil {
		return b
	}
	return def
}
// MustInt64 guarantees the return of an `int64` (with optional default)
//
// useful when you explicitly want an `int64` in a single value return context:
//
//	myFunc(js.Get("param1").MustInt64(), js.Get("optional_param").MustInt64(5150))
//
// Panics (log.Panicf) when more than one default is supplied.
func (j *Json) MustInt64(args ...int64) int64 {
	var def int64
	switch len(args) {
	case 0:
	case 1:
		def = args[0]
	default:
		log.Panicf("MustInt64() received too many arguments %d", len(args))
	}
	i, err := j.Int64()
	if err == nil {
		return i
	}
	return def
}
// MustUint64 guarantees the return of an `uint64` (with optional default)
//
// useful when you explicitly want an `uint64` in a single value return context:
//
//	myFunc(js.Get("param1").MustUint64(), js.Get("optional_param").MustUint64(5150))
//
// Panics (log.Panicf) when more than one default is supplied.
func (j *Json) MustUint64(args ...uint64) uint64 {
	var def uint64
	switch len(args) {
	case 0:
	case 1:
		def = args[0]
	default:
		log.Panicf("MustUint64() received too many arguments %d", len(args))
	}
	i, err := j.Uint64()
	if err == nil {
		return i
	}
	return def
}
// MarshalYAML implements yaml.Marshaller by exposing the raw data.
func (j *Json) MarshalYAML() (any, error) {
	return j.data, nil
}
// UnmarshalYAML implements yaml.Unmarshaller, replacing the stored value
// wholesale with the decoded YAML data.
func (j *Json) UnmarshalYAML(unmarshal func(any) error) error {
	var data any
	if err := unmarshal(&data); err != nil {
		return err
	}
	j.data = data
	return nil
}

View file

@ -0,0 +1,90 @@
package simplejson
import (
"bytes"
"encoding/json"
"errors"
"io"
"reflect"
"strconv"
)
// UnmarshalJSON implements the json.Unmarshaler interface. UseNumber keeps
// numbers as json.Number so integer precision is not lost to float64.
func (j *Json) UnmarshalJSON(p []byte) error {
	dec := json.NewDecoder(bytes.NewBuffer(p))
	dec.UseNumber()
	return dec.Decode(&j.data)
}
// NewFromReader returns a *Json by decoding from an io.Reader, preserving
// number precision via json.Number. Note the partially-populated *Json is
// returned even when err is non-nil.
func NewFromReader(r io.Reader) (*Json, error) {
	j := new(Json)
	dec := json.NewDecoder(r)
	dec.UseNumber()
	err := dec.Decode(&j.data)
	return j, err
}
// Float64 coerces into a float64. json.Number (the normal decode path) is
// converted via its own Float64; native Go numeric types are handled via
// reflection. Non-numeric values yield an error.
func (j *Json) Float64() (float64, error) {
	switch n := j.data.(type) {
	case json.Number:
		return n.Float64()
	case float32, float64:
		return reflect.ValueOf(j.data).Float(), nil
	case int, int8, int16, int32, int64:
		return float64(reflect.ValueOf(j.data).Int()), nil
	case uint, uint8, uint16, uint32, uint64:
		return float64(reflect.ValueOf(j.data).Uint()), nil
	}
	return 0, errors.New("invalid value type")
}
// Int coerces into an int. Floats are truncated toward zero; a json.Number
// with a fractional part fails (Int64 on it returns an error).
func (j *Json) Int() (int, error) {
	switch n := j.data.(type) {
	case json.Number:
		i, err := n.Int64()
		if err != nil {
			return 0, err
		}
		return int(i), nil
	case float32, float64:
		return int(reflect.ValueOf(j.data).Float()), nil
	case int, int8, int16, int32, int64:
		return int(reflect.ValueOf(j.data).Int()), nil
	case uint, uint8, uint16, uint32, uint64:
		return int(reflect.ValueOf(j.data).Uint()), nil
	}
	return 0, errors.New("invalid value type")
}
// Int64 coerces into an int64. Floats are truncated toward zero; a
// json.Number with a fractional part fails.
func (j *Json) Int64() (int64, error) {
	switch n := j.data.(type) {
	case json.Number:
		return n.Int64()
	case float32, float64:
		return int64(reflect.ValueOf(j.data).Float()), nil
	case int, int8, int16, int32, int64:
		return reflect.ValueOf(j.data).Int(), nil
	case uint, uint8, uint16, uint32, uint64:
		return int64(reflect.ValueOf(j.data).Uint()), nil
	}
	return 0, errors.New("invalid value type")
}
// Uint64 coerces into an uint64. A negative json.Number fails via
// strconv.ParseUint; NOTE(review): negative native ints/floats are converted
// unchecked and the out-of-range result is not defined by this code —
// callers should not rely on it.
func (j *Json) Uint64() (uint64, error) {
	switch n := j.data.(type) {
	case json.Number:
		return strconv.ParseUint(n.String(), 10, 64)
	case float32, float64:
		return uint64(reflect.ValueOf(j.data).Float()), nil
	case int, int8, int16, int32, int64:
		return uint64(reflect.ValueOf(j.data).Int()), nil
	case uint, uint8, uint16, uint32, uint64:
		return reflect.ValueOf(j.data).Uint(), nil
	}
	return 0, errors.New("invalid value type")
}

View file

@ -0,0 +1,274 @@
package simplejson
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
)
// TestSimplejson exercises the main accessor surface (Get/GetIndex/GetPath,
// typed getters, Must* defaults, Set/SetPath/Del) against one fixture.
func TestSimplejson(t *testing.T) {
	var ok bool
	var err error
	js, err := NewJson([]byte(`{
		"test": {
			"string_array": ["asdf", "ghjk", "zxcv"],
			"string_array_null": ["abc", null, "efg"],
			"array": [1, "2", 3],
			"arraywithsubs": [{"subkeyone": 1},
			{"subkeytwo": 2, "subkeythree": 3}],
			"int": 10,
			"float": 5.150,
			"string": "simplejson",
			"bool": true,
			"sub_obj": {"a": 1}
		}
	}`))
	assert.NotEqual(t, nil, js)
	assert.Equal(t, nil, err)
	// CheckGet: present vs missing key.
	_, ok = js.CheckGet("test")
	assert.Equal(t, true, ok)
	_, ok = js.CheckGet("missing_key")
	assert.Equal(t, false, ok)
	// GetIndex into an array of objects.
	aws := js.Get("test").Get("arraywithsubs")
	assert.NotEqual(t, nil, aws)
	var awsval int
	awsval, _ = aws.GetIndex(0).Get("subkeyone").Int()
	assert.Equal(t, 1, awsval)
	awsval, _ = aws.GetIndex(1).Get("subkeytwo").Int()
	assert.Equal(t, 2, awsval)
	awsval, _ = aws.GetIndex(1).Get("subkeythree").Int()
	assert.Equal(t, 3, awsval)
	// CheckGetIndex: in-range elements succeed, out-of-range fails.
	arr := js.Get("test").Get("array")
	assert.NotEqual(t, nil, arr)
	val, ok := arr.CheckGetIndex(0)
	assert.Equal(t, ok, true)
	valInt, _ := val.Int()
	assert.Equal(t, valInt, 1)
	val, ok = arr.CheckGetIndex(1)
	assert.Equal(t, ok, true)
	valStr, _ := val.String()
	assert.Equal(t, valStr, "2")
	val, ok = arr.CheckGetIndex(2)
	assert.Equal(t, ok, true)
	valInt, _ = val.Int()
	assert.Equal(t, valInt, 3)
	_, ok = arr.CheckGetIndex(3)
	assert.Equal(t, ok, false)
	// Typed getters.
	i, _ := js.Get("test").Get("int").Int()
	assert.Equal(t, 10, i)
	f, _ := js.Get("test").Get("float").Float64()
	assert.Equal(t, 5.150, f)
	s, _ := js.Get("test").Get("string").String()
	assert.Equal(t, "simplejson", s)
	b, _ := js.Get("test").Get("bool").Bool()
	assert.Equal(t, true, b)
	// Must* getters: present keys return the value, missing keys the default.
	mi := js.Get("test").Get("int").MustInt()
	assert.Equal(t, 10, mi)
	mi2 := js.Get("test").Get("missing_int").MustInt(5150)
	assert.Equal(t, 5150, mi2)
	ms := js.Get("test").Get("string").MustString()
	assert.Equal(t, "simplejson", ms)
	ms2 := js.Get("test").Get("missing_string").MustString("fyea")
	assert.Equal(t, "fyea", ms2)
	ma2 := js.Get("test").Get("missing_array").MustArray([]any{"1", 2, "3"})
	assert.Equal(t, ma2, []any{"1", 2, "3"})
	msa := js.Get("test").Get("string_array").MustStringArray()
	assert.Equal(t, msa[0], "asdf")
	assert.Equal(t, msa[1], "ghjk")
	assert.Equal(t, msa[2], "zxcv")
	msa2 := js.Get("test").Get("string_array").MustStringArray([]string{"1", "2", "3"})
	assert.Equal(t, msa2[0], "asdf")
	assert.Equal(t, msa2[1], "ghjk")
	assert.Equal(t, msa2[2], "zxcv")
	msa3 := js.Get("test").Get("missing_array").MustStringArray([]string{"1", "2", "3"})
	assert.Equal(t, msa3, []string{"1", "2", "3"})
	mm2 := js.Get("test").Get("missing_map").MustMap(map[string]any{"found": false})
	assert.Equal(t, mm2, map[string]any{"found": false})
	// StringArray: nulls become empty strings.
	strs, err := js.Get("test").Get("string_array").StringArray()
	assert.Equal(t, err, nil)
	assert.Equal(t, strs[0], "asdf")
	assert.Equal(t, strs[1], "ghjk")
	assert.Equal(t, strs[2], "zxcv")
	strs2, err := js.Get("test").Get("string_array_null").StringArray()
	assert.Equal(t, err, nil)
	assert.Equal(t, strs2[0], "abc")
	assert.Equal(t, strs2[1], "")
	assert.Equal(t, strs2[2], "efg")
	// GetPath is equivalent to chained Get calls.
	gp, _ := js.GetPath("test", "string").String()
	assert.Equal(t, "simplejson", gp)
	gp2, _ := js.GetPath("test", "int").Int()
	assert.Equal(t, 10, gp2)
	assert.Equal(t, js.Get("test").Get("bool").MustBool(), true)
	// Mutation via Set/Del at root and nested levels.
	js.Set("float2", 300.0)
	assert.Equal(t, js.Get("float2").MustFloat64(), 300.0)
	js.Set("test2", "setTest")
	assert.Equal(t, "setTest", js.Get("test2").MustString())
	js.Del("test2")
	assert.NotEqual(t, "setTest", js.Get("test2").MustString())
	js.Get("test").Get("sub_obj").Set("a", 2)
	assert.Equal(t, 2, js.Get("test").Get("sub_obj").Get("a").MustInt())
	js.GetPath("test", "sub_obj").Set("a", 3)
	assert.Equal(t, 3, js.GetPath("test", "sub_obj", "a").MustInt())
}
// TestStdlibInterfaces verifies *Json round-trips through encoding/json as
// a struct field (Marshaler/Unmarshaler work with the standard library).
func TestStdlibInterfaces(t *testing.T) {
	val := new(struct {
		Name   string `json:"name"`
		Params *Json  `json:"params"`
	})
	val2 := new(struct {
		Name   string `json:"name"`
		Params *Json  `json:"params"`
	})
	raw := `{"name":"myobject","params":{"string":"simplejson"}}`
	assert.Equal(t, nil, json.Unmarshal([]byte(raw), val))
	assert.Equal(t, "myobject", val.Name)
	assert.NotEqual(t, nil, val.Params.data)
	s, _ := val.Params.Get("string").String()
	assert.Equal(t, "simplejson", s)
	p, err := json.Marshal(val)
	assert.Equal(t, nil, err)
	assert.Equal(t, nil, json.Unmarshal(p, val2))
	assert.Equal(t, val, val2) // stable
}
// TestSet verifies a key written with Set is readable via GetPath.
func TestSet(t *testing.T) {
	js, err := NewJson([]byte(`{}`))
	assert.Equal(t, nil, err)
	js.Set("baz", "bing")
	s, err := js.GetPath("baz").String()
	assert.Equal(t, nil, err)
	assert.Equal(t, "bing", s)
}
// TestReplace verifies UnmarshalJSON replaces existing content wholesale.
func TestReplace(t *testing.T) {
	js, err := NewJson([]byte(`{}`))
	assert.Equal(t, nil, err)
	err = js.UnmarshalJSON([]byte(`{"baz":"bing"}`))
	assert.Equal(t, nil, err)
	s, err := js.GetPath("baz").String()
	assert.Equal(t, nil, err)
	assert.Equal(t, "bing", s)
}
// TestSetPath verifies SetPath creates intermediate objects as needed.
func TestSetPath(t *testing.T) {
	js, err := NewJson([]byte(`{}`))
	assert.Equal(t, nil, err)
	js.SetPath([]string{"foo", "bar"}, "baz")
	s, err := js.GetPath("foo", "bar").String()
	assert.Equal(t, nil, err)
	assert.Equal(t, "baz", s)
}
// TestSetPathNoPath verifies an empty branch replaces the root value,
// discarding all previously stored keys.
func TestSetPathNoPath(t *testing.T) {
	js, err := NewJson([]byte(`{"some":"data","some_number":1.0,"some_bool":false}`))
	assert.Equal(t, nil, err)
	f := js.GetPath("some_number").MustFloat64(99.0)
	assert.Equal(t, f, 1.0)
	js.SetPath([]string{}, map[string]any{"foo": "bar"})
	s, err := js.GetPath("foo").String()
	assert.Equal(t, nil, err)
	assert.Equal(t, "bar", s)
	// Old key is gone, so the default is returned.
	f = js.GetPath("some_number").MustFloat64(99.0)
	assert.Equal(t, f, 99.0)
}
// TestPathWillAugmentExisting verifies SetPath adds a key to an existing
// object without disturbing its other keys.
func TestPathWillAugmentExisting(t *testing.T) {
	js, err := NewJson([]byte(`{"this":{"a":"aa","b":"bb","c":"cc"}}`))
	assert.Equal(t, nil, err)
	js.SetPath([]string{"this", "d"}, "dd")
	cases := []struct {
		path    []string
		outcome string
	}{
		{
			path:    []string{"this", "a"},
			outcome: "aa",
		},
		{
			path:    []string{"this", "b"},
			outcome: "bb",
		},
		{
			path:    []string{"this", "c"},
			outcome: "cc",
		},
		{
			path:    []string{"this", "d"},
			outcome: "dd",
		},
	}
	for _, tc := range cases {
		s, err := js.GetPath(tc.path...).String()
		assert.Equal(t, nil, err)
		assert.Equal(t, tc.outcome, s)
	}
}
// TestPathWillOverwriteExisting verifies SetPath destructively replaces a
// non-object value in the middle of the path with a new object.
func TestPathWillOverwriteExisting(t *testing.T) {
	// notice how "a" is 0.1 - but then we'll try to set at path a, foo
	js, err := NewJson([]byte(`{"this":{"a":0.1,"b":"bb","c":"cc"}}`))
	assert.Equal(t, nil, err)
	js.SetPath([]string{"this", "a", "foo"}, "bar")
	s, err := js.GetPath("this", "a", "foo").String()
	assert.Equal(t, nil, err)
	assert.Equal(t, "bar", s)
}
// TestMustJson verifies MustJson parses valid input and panics with the
// expected message on malformed input.
func TestMustJson(t *testing.T) {
	js := MustJson([]byte(`{"foo": "bar"}`))
	assert.Equal(t, js.Get("foo").MustString(), "bar")
	assert.PanicsWithValue(t, "could not unmarshal JSON: \"unexpected EOF\"", func() {
		MustJson([]byte(`{`))
	})
}

View file

@ -0,0 +1,48 @@
package main
import (
"context"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
elasticsearch "github.com/grafana/grafana/pkg/tsdb/elasticsearch"
)
// Compile-time assertions that Datasource implements every backend handler
// interface the plugin registers for.
var (
	_ backend.QueryDataHandler    = (*Datasource)(nil)
	_ backend.CheckHealthHandler  = (*Datasource)(nil)
	_ backend.CallResourceHandler = (*Datasource)(nil)
)
// NewDatasource is the instance factory handed to datasource.Manage: it
// builds a Datasource backed by the decoupled elasticsearch service with a
// fresh HTTP client provider.
func NewDatasource(context.Context, backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
	return &Datasource{
		Service: elasticsearch.ProvideService(httpclient.NewProvider()),
	}, nil
}
// Datasource adapts the elasticsearch Service to the plugin SDK's backend
// handler interfaces, injecting contextual middleware per request.
type Datasource struct {
	Service *elasticsearch.Service
}
// contextualMiddlewares attaches per-request HTTP client middleware derived
// from the Grafana config carried in ctx — currently only the response size
// limit.
func contextualMiddlewares(ctx context.Context) context.Context {
	cfg := backend.GrafanaConfigFromContext(ctx)
	responseLimitMiddleware := httpclient.ResponseLimitMiddleware(cfg.ResponseLimit())
	ctx = httpclient.WithContextualMiddleware(ctx, responseLimitMiddleware)
	return ctx
}
// QueryData forwards query requests to the service after installing the
// contextual middleware.
func (d *Datasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	ctx = contextualMiddlewares(ctx)
	return d.Service.QueryData(ctx, req)
}
// CallResource forwards resource calls to the service after installing the
// contextual middleware.
func (d *Datasource) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
	ctx = contextualMiddlewares(ctx)
	return d.Service.CallResource(ctx, req, sender)
}
// CheckHealth forwards health checks to the service after installing the
// contextual middleware.
func (d *Datasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
	ctx = contextualMiddlewares(ctx)
	return d.Service.CheckHealth(ctx, req)
}

View file

@ -0,0 +1,23 @@
package main
import (
"os"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
)
// main starts the standalone elasticsearch datasource plugin.
func main() {
	// datasource.Manage blocks until Grafana shuts the process down or the
	// plugin exits itself. It creates one Datasource instance per configured
	// datasource via the NewDatasource factory, and disposes/recreates
	// instances when a datasource's configuration changes.
	if err := datasource.Manage("elasticsearch", NewDatasource, datasource.ManageOpts{}); err != nil {
		log.DefaultLogger.Error(err.Error())
		os.Exit(1)
	}
}

View file

@ -4,8 +4,6 @@ const cloudwatchPlugin = async () =>
await import(/* webpackChunkName: "cloudwatchPlugin" */ 'app/plugins/datasource/cloudwatch/module');
const dashboardDSPlugin = async () =>
await import(/* webpackChunkName "dashboardDSPlugin" */ 'app/plugins/datasource/dashboard/module');
const elasticsearchPlugin = async () =>
await import(/* webpackChunkName: "elasticsearchPlugin" */ 'app/plugins/datasource/elasticsearch/module');
const grafanaPlugin = async () =>
await import(/* webpackChunkName: "grafanaPlugin" */ 'app/plugins/datasource/grafana/module');
const influxdbPlugin = async () =>
@ -75,7 +73,6 @@ const builtInPlugins: Record<string, System.Module | (() => Promise<System.Modul
// datasources
'core:plugin/cloudwatch': cloudwatchPlugin,
'core:plugin/dashboard': dashboardDSPlugin,
'core:plugin/elasticsearch': elasticsearchPlugin,
'core:plugin/grafana': grafanaPlugin,
'core:plugin/influxdb': influxdbPlugin,
'core:plugin/mixed': mixedPlugin,

View file

@ -1,8 +1,7 @@
import { render, screen } from '@testing-library/react';
import { select } from 'react-select-event';
import { DateHistogram } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { DateHistogram } from '../../../../dataquery.gen';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { DateHistogramSettingsEditor } from './DateHistogramSettingsEditor';

View file

@ -4,9 +4,9 @@ import { GroupBase, OptionsOrGroups } from 'react-select';
import { InternalTimeZones, SelectableValue } from '@grafana/data';
import { InlineField, Input, Select, TimeZonePicker } from '@grafana/ui';
import { DateHistogram } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { calendarIntervals } from '../../../../QueryBuilder';
import { DateHistogram } from '../../../../dataquery.gen';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { useCreatableSelectPersistedBehaviour } from '../../../hooks/useCreatableSelectPersistedBehaviour';
import { changeBucketAggregationSetting } from '../state/actions';
@ -37,11 +37,11 @@ const hasValue =
const isValidNewOption = (
inputValue: string,
_: SelectableValue<string> | null,
options: OptionsOrGroups<unknown, GroupBase<unknown>>
options: OptionsOrGroups<SelectableValue<string>, GroupBase<SelectableValue<string>>>
) => {
// TODO: would be extremely nice here to allow only template variables and values that are
// valid date histogram's Interval options
const valueExists = (options as Array<SelectableValue<string>>).some(hasValue(inputValue));
const valueExists = options.some(hasValue(inputValue));
// we also don't want users to create "empty" values
return !valueExists && inputValue.trim().length > 0;
};

View file

@ -3,8 +3,8 @@ import { uniqueId } from 'lodash';
import { useEffect, useRef } from 'react';
import { InlineField, Input, QueryField } from '@grafana/ui';
import { Filters } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { Filters } from '../../../../../dataquery.gen';
import { useDispatch, useStatelessReducer } from '../../../../../hooks/useStatelessReducer';
import { AddRemove } from '../../../../AddRemove';
import { changeBucketAggregationSetting } from '../../state/actions';

View file

@ -1,6 +1,6 @@
import { createAction } from '@reduxjs/toolkit';
import { Filter } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { Filter } from '../../../../../../dataquery.gen';
export const addFilter = createAction('@bucketAggregations/filter/add');
export const removeFilter = createAction<number>('@bucketAggregations/filter/remove');

View file

@ -1,6 +1,5 @@
import { reducerTester } from 'test/core/redux/reducerTester';
import { Filter } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { Filter } from '../../../../../../dataquery.gen';
import { reducerTester } from '../../../../../reducerTester';
import { addFilter, changeFilter, removeFilter } from './actions';
import { reducer } from './reducer';

View file

@ -1,7 +1,6 @@
import { Action } from 'redux';
import { Filter } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { Filter } from '../../../../../../dataquery.gen';
import { defaultFilter } from '../utils';
import { addFilter, changeFilter, removeFilter } from './actions';

View file

@ -1,3 +1,3 @@
import { Filter } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { Filter } from '../../../../../dataquery.gen';
export const defaultFilter = (): Filter => ({ label: '', query: '*' });

View file

@ -1,14 +1,7 @@
import { fireEvent, screen } from '@testing-library/react';
import selectEvent from 'react-select-event';
import {
Average,
Derivative,
ElasticsearchDataQuery,
Terms,
TopMetrics,
} from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { Average, Derivative, ElasticsearchDataQuery, Terms, TopMetrics } from '../../../../dataquery.gen';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { renderWithESProvider } from '../../../../test-helpers/render';
import { describeMetric } from '../../../../utils';

View file

@ -2,15 +2,9 @@ import { uniqueId } from 'lodash';
import { useRef } from 'react';
import { SelectableValue } from '@grafana/data';
import { InlineField, Select, Input } from '@grafana/ui';
import {
Terms,
ExtendedStats,
ExtendedStatMetaType,
Percentiles,
MetricAggregation,
} from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { InlineField, Input, Select } from '@grafana/ui';
import { ExtendedStats, MetricAggregation, Percentiles, Terms } from '../../../../dataquery.gen';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { describeMetric } from '../../../../utils';
import { useQuery } from '../../ElasticsearchQueryContext';
@ -105,7 +99,7 @@ function createOrderByOptionsForExtendedStats(metric: ExtendedStats): Selectable
if (!metric.meta) {
return [];
}
const metaKeys = Object.keys(metric.meta) as ExtendedStatMetaType[];
const metaKeys = Object.keys(metric.meta);
return metaKeys
.filter((key) => metric.meta?.[key])
.map((key) => {

View file

@ -2,8 +2,8 @@ import { uniqueId } from 'lodash';
import { ComponentProps, useRef } from 'react';
import { InlineField, Input } from '@grafana/ui';
import { BucketAggregation } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { BucketAggregation } from '../../../../dataquery.gen';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { SettingsEditorContainer } from '../../SettingsEditorContainer';
import { changeBucketAggregationSetting } from '../state/actions';

View file

@ -1,7 +1,6 @@
import { BucketAggregation } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { BucketAggregation } from '../../../../dataquery.gen';
import { defaultGeoHashPrecisionString } from '../../../../queryDef';
import { describeMetric, convertOrderByToMetricId } from '../../../../utils';
import { convertOrderByToMetricId, describeMetric } from '../../../../utils';
import { useQuery } from '../../ElasticsearchQueryContext';
import { bucketAggregationConfig, orderByOptions, orderOptions } from '../utils';

View file

@ -1,10 +1,6 @@
import { createAction } from '@reduxjs/toolkit';
import {
BucketAggregation,
BucketAggregationType,
BucketAggregationWithField,
} from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { BucketAggregation, BucketAggregationType, BucketAggregationWithField } from '../../../../dataquery.gen';
export const addBucketAggregation = createAction<BucketAggregation['id']>('@bucketAggs/add');
export const removeBucketAggregation = createAction<BucketAggregation['id']>('@bucketAggs/remove');

View file

@ -1,9 +1,4 @@
import {
BucketAggregation,
DateHistogram,
ElasticsearchDataQuery,
} from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { BucketAggregation, DateHistogram, ElasticsearchDataQuery } from '../../../../dataquery.gen';
import { defaultBucketAgg } from '../../../../queryDef';
import { reducerTester } from '../../../reducerTester';
import { changeMetricType } from '../../MetricAggregationsEditor/state/actions';

View file

@ -1,7 +1,6 @@
import { Action } from '@reduxjs/toolkit';
import { BucketAggregation, ElasticsearchDataQuery, Terms } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { BucketAggregation, ElasticsearchDataQuery, Terms } from '../../../../dataquery.gen';
import { defaultBucketAgg } from '../../../../queryDef';
import { removeEmpty } from '../../../../utils';
import { changeMetricType } from '../../MetricAggregationsEditor/state/actions';
@ -47,11 +46,12 @@ export const createReducer =
}
/*
TODO: The previous version of the query editor was keeping some of the old bucket aggregation's configurations
in the new selected one (such as field or some settings).
In the future it would be nice to have the same behavior but it's hard without a proper definition,
as Elasticsearch will error sometimes if some settings are not compatible.
*/
TODO: The previous version of the query editor was keeping some of the old bucket aggregation's configurations
in the new selected one (such as field or some settings).
In the future it would be nice to have the same behavior but it's hard without a proper definition,
as Elasticsearch will error sometimes if some settings are not compatible.
*/
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
return {
id: bucketAgg.id,
type: action.payload.newType,

View file

@ -2,10 +2,10 @@ import { css } from '@emotion/css';
import { uniqueId } from 'lodash';
import { Fragment, useEffect } from 'react';
import { Input, InlineLabel } from '@grafana/ui';
import { BucketScript, MetricAggregation } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { InlineLabel, Input } from '@grafana/ui';
import { useStatelessReducer, useDispatch } from '../../../../../hooks/useStatelessReducer';
import { BucketScript, MetricAggregation } from '../../../../../dataquery.gen';
import { useDispatch, useStatelessReducer } from '../../../../../hooks/useStatelessReducer';
import { AddRemove } from '../../../../AddRemove';
import { MetricPicker } from '../../../../MetricPicker';
import { changeMetricAttribute } from '../../state/actions';
@ -13,9 +13,9 @@ import { SettingField } from '../SettingField';
import {
addPipelineVariable,
changePipelineVariableMetric,
removePipelineVariable,
renamePipelineVariable,
changePipelineVariableMetric,
} from './state/actions';
import { reducer } from './state/reducer';

View file

@ -1,5 +1,4 @@
import { PipelineVariable } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { PipelineVariable } from '../../../../../../dataquery.gen';
import { reducerTester } from '../../../../../reducerTester';
import {

View file

@ -1,7 +1,6 @@
import { Action } from '@reduxjs/toolkit';
import { PipelineVariable } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { PipelineVariable } from '../../../../../../dataquery.gen';
import { defaultPipelineVariable, generatePipelineVariableName } from '../utils';
import {

View file

@ -1,4 +1,4 @@
import { PipelineVariable } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { PipelineVariable } from '../../../../../dataquery.gen';
export const defaultPipelineVariable = (name: string): PipelineVariable => ({ name, pipelineAgg: '' });

View file

@ -2,11 +2,8 @@ import { uniqueId } from 'lodash';
import { ComponentProps, useState } from 'react';
import { InlineField, Input, TextArea } from '@grafana/ui';
import {
MetricAggregationWithSettings,
MetricAggregationWithInlineScript,
} from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { MetricAggregationWithInlineScript, MetricAggregationWithSettings } from '../../../../dataquery.gen';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { getScriptValue } from '../../../../utils';
import { SettingKeyOf } from '../../../types';
@ -33,9 +30,11 @@ export function SettingField<T extends MetricAggregationWithSettings, K extends
const [id] = useState(uniqueId(`es-field-id-`));
const settings = metric.settings;
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
let defaultValue = settings?.[settingName as keyof typeof settings] || '';
if (settingName === 'script') {
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
defaultValue = getScriptValue(metric as MetricAggregationWithInlineScript);
}

View file

@ -2,8 +2,8 @@ import { css } from '@emotion/css';
import { SelectableValue } from '@grafana/data';
import { AsyncMultiSelect, InlineField, SegmentAsync, Select } from '@grafana/ui';
import { TopMetrics } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { TopMetrics } from '../../../../dataquery.gen';
import { useFields } from '../../../../hooks/useFields';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { orderOptions } from '../../BucketAggregationsEditor/utils';

View file

@ -1,8 +1,8 @@
import { fireEvent, render, screen } from '@testing-library/react';
import { getDefaultTimeRange } from '@grafana/data';
import { ElasticsearchDataQuery } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { ElasticsearchDataQuery } from '../../../../dataquery.gen';
import { ElasticDatasource } from '../../../../datasource';
import { ElasticsearchProvider } from '../../ElasticsearchQueryContext';

View file

@ -1,10 +1,10 @@
import { uniqueId } from 'lodash';
import { ComponentProps, useId, useRef, useState } from 'react';
import * as React from 'react';
import { ComponentProps, useId, useRef, useState } from 'react';
import { InlineField, Input, InlineSwitch, Select } from '@grafana/ui';
import { MetricAggregation, ExtendedStat } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { InlineField, InlineSwitch, Input, Select } from '@grafana/ui';
import { ExtendedStat, MetricAggregation } from '../../../../dataquery.gen';
import { useDispatch } from '../../../../hooks/useStatelessReducer';
import { extendedStats } from '../../../../queryDef';
import { SettingsEditorContainer } from '../../SettingsEditorContainer';

View file

@ -1,5 +1,4 @@
import { MetricAggregation } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { MetricAggregation } from '../../../../dataquery.gen';
import { extendedStats } from '../../../../queryDef';
const hasValue = (value: string) => (object: { value: string }) => object.value === value;

View file

@ -1,7 +1,6 @@
import { createAction } from '@reduxjs/toolkit';
import { MetricAggregation, MetricAggregationWithSettings } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { MetricAggregation, MetricAggregationWithSettings } from '../../../../dataquery.gen';
import { MetricAggregationWithMeta } from '../../../../types';
export const addMetric = createAction<MetricAggregation['id']>('@metrics/add');

View file

@ -1,10 +1,4 @@
import {
MetricAggregation,
ElasticsearchDataQuery,
Derivative,
ExtendedStats,
} from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { Derivative, ElasticsearchDataQuery, ExtendedStats, MetricAggregation } from '../../../../dataquery.gen';
import { defaultMetricAgg } from '../../../../queryDef';
import { reducerTester } from '../../../reducerTester';
import { changeEditorTypeAndResetQuery, initQuery } from '../../state';

View file

@ -1,7 +1,6 @@
import { Action } from '@reduxjs/toolkit';
import { ElasticsearchDataQuery, MetricAggregation } from 'app/plugins/datasource/elasticsearch/dataquery.gen';
import { ElasticsearchDataQuery, MetricAggregation } from '../../../../dataquery.gen';
import { defaultMetricAgg, queryTypeToMetricType } from '../../../../queryDef';
import { removeEmpty } from '../../../../utils';
import { changeEditorTypeAndResetQuery, initQuery } from '../../state';
@ -57,6 +56,7 @@ export const reducer = (
In the future it would be nice to have the same behavior but it's hard without a proper definition,
as Elasticsearch will error sometimes if some settings are not compatible.
*/
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
return {
id: metric.id,
type: action.payload.type,

View file

@ -2,7 +2,7 @@ import { AnyAction } from '@reduxjs/toolkit';
import { cloneDeep } from 'lodash';
import { Action } from 'redux';
import { StoreState } from 'app/types/store';
import { StoreState } from '../types/store';
type GrafanaReducer<S = StoreState, A extends Action = AnyAction> = (state: S, action: A) => S;

View file

@ -0,0 +1 @@
import '@grafana/plugin-configs/jest/jest-setup';

View file

@ -0,0 +1,3 @@
import defaultConfig from '@grafana/plugin-configs/jest/jest.config.js';

// Re-export the shared plugin Jest configuration unchanged.
export default defaultConfig;

View file

@ -0,0 +1,62 @@
{
"name": "@grafana-plugins/elasticsearch",
"description": "Grafana data source for Elasticsearch",
"private": true,
"version": "12.4.0-pre",
"dependencies": {
"@emotion/css": "11.13.5",
"@grafana/aws-sdk": "0.8.3",
"@grafana/data": "12.4.0-pre",
"@grafana/plugin-ui": "^0.11.1",
"@grafana/runtime": "12.4.0-pre",
"@grafana/schema": "12.4.0-pre",
"@grafana/ui": "12.4.0-pre",
"@reduxjs/toolkit": "2.10.1",
"lodash": "4.17.21",
"lucene": "^2.1.1",
"react": "18.3.1",
"react-dom": "18.3.1",
"react-redux": "9.2.0",
"react-select": "5.10.2",
"react-use": "17.6.0",
"redux": "5.0.1",
"redux-thunk": "3.1.0",
"rxjs": "7.8.2",
"semver": "7.7.3",
"tslib": "2.8.1"
},
"devDependencies": {
"@grafana/e2e-selectors": "12.4.0-pre",
"@grafana/plugin-configs": "12.4.0-pre",
"@testing-library/dom": "10.4.1",
"@testing-library/jest-dom": "6.6.4",
"@testing-library/react": "16.3.0",
"@testing-library/user-event": "14.6.1",
"@types/jest": "29.5.14",
"@types/lodash": "4.17.20",
"@types/lucene": "^2",
"@types/node": "24.10.1",
"@types/react": "18.3.18",
"@types/react-dom": "18.3.5",
"@types/semver": "7.7.1",
"jest": "29.7.0",
"react-select-event": "5.5.1",
"ts-node": "10.9.2",
"typescript": "5.9.2",
"webpack": "5.101.0"
},
"peerDependencies": {
"@grafana/runtime": "*"
},
"resolutions": {
"redux": "^5.0.0"
},
"scripts": {
"build": "webpack -c ./webpack.config.ts --env production",
"build:commit": "webpack -c ./webpack.config.ts --env production --env commit=$(git rev-parse --short HEAD)",
"dev": "webpack -w -c ./webpack.config.ts --env development",
"test": "jest --watch --onlyChanged",
"test:ci": "jest --maxWorkers 4"
},
"packageManager": "yarn@4.11.0"
}

View file

@ -2,6 +2,7 @@
"type": "datasource",
"name": "Elasticsearch",
"id": "elasticsearch",
"executable": "gpx_elasticsearch",
"category": "logging",
"info": {
"description": "Open source logging & analytics database",
@ -27,7 +28,8 @@
"name": "Documentation",
"url": "https://grafana.com/docs/grafana/latest/datasources/elasticsearch/"
}
]
],
"version": "%VERSION%"
},
"alerting": true,
"annotations": true,
@ -36,5 +38,9 @@
"backend": true,
"queryOptions": {
"minInterval": true
},
"dependencies": {
"grafanaDependency": ">=11.6.0",
"plugins": []
}
}

View file

@ -0,0 +1,9 @@
{
"$schema": "../../../../../node_modules/nx/schemas/project-schema.json",
"projectType": "library",
"tags": ["scope:plugin", "type:datasource"],
"targets": {
"build": {},
"dev": {}
}
}

View file

@ -0,0 +1,11 @@
import { createAction } from '@reduxjs/toolkit';
import { StoreState } from '../../types/store';
export type CleanUpAction = (state: StoreState) => void;
export interface CleanUpPayload {
cleanupAction: CleanUpAction;
}
export const cleanUpAction = createAction<CleanUpPayload>('core/cleanUpState');

View file

@ -0,0 +1,21 @@
import { ReducersMapObject } from '@reduxjs/toolkit';
import { Action as AnyAction, combineReducers } from 'redux';
const addedReducers = {
defaultReducer: (state = {}) => state,
templating: (state = { lastKey: 'key' }) => state,
};
export const addReducer = (newReducers: ReducersMapObject) => {
Object.assign(addedReducers, newReducers);
};
export const createRootReducer = () => {
const appReducer = combineReducers({
...addedReducers,
});
return (state: Parameters<typeof appReducer>[0], action: AnyAction) => {
return appReducer(state, action);
};
};

View file

@ -0,0 +1,47 @@
import { createListenerMiddleware, configureStore as reduxConfigureStore } from '@reduxjs/toolkit';
import { setupListeners } from '@reduxjs/toolkit/query';
import { Middleware } from 'redux';
import { addReducer, createRootReducer } from '../reducers/root';
import { StoreState } from '../types/store';
import { setStore } from './store';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function addRootReducer(reducers: any) {
// this is ok now because we add reducers before configureStore is called
// in the future if we want to add reducers during runtime
// we'll have to solve this in a more dynamic way
addReducer(reducers);
}
const listenerMiddleware = createListenerMiddleware();
const extraMiddleware: Middleware[] = [];
export function addExtraMiddleware(middleware: Middleware) {
extraMiddleware.push(middleware);
}
export function configureStore(initialState?: Partial<StoreState>) {
const store = reduxConfigureStore({
reducer: createRootReducer(),
middleware: (getDefaultMiddleware) =>
getDefaultMiddleware({ thunk: true, serializableCheck: false, immutableCheck: false }).concat(
listenerMiddleware.middleware,
...extraMiddleware
),
devTools: process.env.NODE_ENV !== 'production',
preloadedState: {
...initialState,
},
});
// this enables "refetchOnFocus" and "refetchOnReconnect" for RTK Query
setupListeners(store.dispatch);
setStore(store);
return store;
}
export type RootState = ReturnType<ReturnType<typeof configureStore>['getState']>;
export type AppDispatch = ReturnType<typeof configureStore>['dispatch'];

View file

@ -0,0 +1,26 @@
import { Store } from 'redux';

import { StoreState } from '../types/store';

/** Module-level handle to the configured store; assigned once via setStore(). */
export let store: Store<StoreState>;

/** Publishes the configured store for module-level getState()/dispatch(). */
export function setStore(newStore: Store<StoreState>) {
  store = newStore;
}

/**
 * Returns the current store state, or a minimal stand-in when no store has
 * been configured (used by tests).
 */
export function getState(): StoreState {
  if (store?.getState) {
    return store.getState();
  }

  return { defaultReducer: () => ({}), templating: { lastKey: 'key' } }; // used by tests
}

/** Dispatches an action on the configured store; no-op when none exists. */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function dispatch(action: any) {
  if (!store?.getState) {
    return;
  }

  return store.dispatch(action);
}

View file

@ -0,0 +1,8 @@
{
"compilerOptions": {
"jsx": "react-jsx",
"types": ["node", "jest", "@testing-library/jest-dom"]
},
"extends": "@grafana/plugin-configs/tsconfig.json",
"include": ["."]
}

View file

@ -0,0 +1,46 @@
/* eslint-disable no-restricted-imports */
import {
Action,
addListener as addListenerUntyped,
AsyncThunk,
AsyncThunkOptions,
AsyncThunkPayloadCreator,
createAsyncThunk as createAsyncThunkUntyped,
PayloadAction,
TypedAddListener,
} from '@reduxjs/toolkit';
import {
TypedUseSelectorHook,
useDispatch as useDispatchUntyped,
useSelector as useSelectorUntyped,
} from 'react-redux';
import { ThunkDispatch as GenericThunkDispatch, ThunkAction } from 'redux-thunk';
import type { createRootReducer } from '../reducers/root';
import { AppDispatch, RootState } from '../store/configureStore';
import { dispatch as storeDispatch } from '../store/store';
export type StoreState = ReturnType<ReturnType<typeof createRootReducer>>;
/*
* Utility type to get strongly types thunks
*/
export type ThunkResult<R> = ThunkAction<R, StoreState, undefined, PayloadAction<unknown>>;
export type ThunkDispatch = GenericThunkDispatch<StoreState, undefined, Action>;
// Typed useDispatch & useSelector hooks
export const useDispatch: () => AppDispatch = useDispatchUntyped;
export const useSelector: TypedUseSelectorHook<RootState> = useSelectorUntyped;
type DefaultThunkApiConfig = { dispatch: AppDispatch; state: StoreState };
export const createAsyncThunk = <Returned, ThunkArg = void, ThunkApiConfig extends {} = DefaultThunkApiConfig>(
typePrefix: string,
payloadCreator: AsyncThunkPayloadCreator<Returned, ThunkArg, ThunkApiConfig>,
options?: AsyncThunkOptions<ThunkArg, ThunkApiConfig>
): AsyncThunk<Returned, ThunkArg, ThunkApiConfig> =>
createAsyncThunkUntyped<Returned, ThunkArg, ThunkApiConfig>(typePrefix, payloadCreator, options);
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
export const addListener = addListenerUntyped as TypedAddListener<RootState, AppDispatch>;
export const dispatch: AppDispatch = storeDispatch;

View file

@ -0,0 +1,9 @@
import type { Configuration } from 'webpack';
import grafanaConfig, { type Env } from '@grafana/plugin-configs/webpack.config.ts';
const config = async (env: Env): Promise<Configuration> => {
return await grafanaConfig(env);
};
export default config;

View file

@ -2610,6 +2610,53 @@ __metadata:
languageName: node
linkType: hard
"@grafana-plugins/elasticsearch@workspace:public/app/plugins/datasource/elasticsearch":
version: 0.0.0-use.local
resolution: "@grafana-plugins/elasticsearch@workspace:public/app/plugins/datasource/elasticsearch"
dependencies:
"@emotion/css": "npm:11.13.5"
"@grafana/aws-sdk": "npm:0.8.3"
"@grafana/data": "npm:12.4.0-pre"
"@grafana/e2e-selectors": "npm:12.4.0-pre"
"@grafana/plugin-configs": "npm:12.4.0-pre"
"@grafana/plugin-ui": "npm:^0.11.1"
"@grafana/runtime": "npm:12.4.0-pre"
"@grafana/schema": "npm:12.4.0-pre"
"@grafana/ui": "npm:12.4.0-pre"
"@reduxjs/toolkit": "npm:2.10.1"
"@testing-library/dom": "npm:10.4.1"
"@testing-library/jest-dom": "npm:6.6.4"
"@testing-library/react": "npm:16.3.0"
"@testing-library/user-event": "npm:14.6.1"
"@types/jest": "npm:29.5.14"
"@types/lodash": "npm:4.17.20"
"@types/lucene": "npm:^2"
"@types/node": "npm:24.10.1"
"@types/react": "npm:18.3.18"
"@types/react-dom": "npm:18.3.5"
"@types/semver": "npm:7.7.1"
jest: "npm:29.7.0"
lodash: "npm:4.17.21"
lucene: "npm:^2.1.1"
react: "npm:18.3.1"
react-dom: "npm:18.3.1"
react-redux: "npm:9.2.0"
react-select: "npm:5.10.2"
react-select-event: "npm:5.5.1"
react-use: "npm:17.6.0"
redux: "npm:5.0.1"
redux-thunk: "npm:3.1.0"
rxjs: "npm:7.8.2"
semver: "npm:7.7.3"
ts-node: "npm:10.9.2"
tslib: "npm:2.8.1"
typescript: "npm:5.9.2"
webpack: "npm:5.101.0"
peerDependencies:
"@grafana/runtime": "*"
languageName: unknown
linkType: soft
"@grafana-plugins/grafana-azure-monitor-datasource@workspace:public/app/plugins/datasource/azuremonitor":
version: 0.0.0-use.local
resolution: "@grafana-plugins/grafana-azure-monitor-datasource@workspace:public/app/plugins/datasource/azuremonitor"