SQLSchemas: Move the types to the query types package (#116745)

Ryan McKinley 2026-01-23 17:32:46 +03:00 committed by GitHub
parent 6ece3a6279
commit 0039714046
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
16 changed files with 528 additions and 361 deletions

View file

@ -7,7 +7,6 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
data "github.com/grafana/grafana-plugin-sdk-go/experimental/apis/data/v0alpha1"
"github.com/grafana/grafana/pkg/expr"
)
// Generic query request with shared time across all values
@ -29,14 +28,6 @@ type QueryDataResponse struct {
backend.QueryDataResponse `json:",inline"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type SQLSchemas struct {
metav1.TypeMeta `json:",inline"`
// Backend wrapper (external dependency)
expr.SQLSchemas `json:"sqlSchemas,inline"`
}
// GetResponseCode return the right status code for the response by checking the responses.
func GetResponseCode(rsp *backend.QueryDataResponse) int {
if rsp == nil {

View file

@ -0,0 +1,187 @@
package v0alpha1
import (
"encoding/json"
"reflect"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
openapi "k8s.io/kube-openapi/pkg/common"
spec "k8s.io/kube-openapi/pkg/validation/spec"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// QueryResponseSQLSchemas returns info about what the schema for a DS query would look like if the
// query were used as an input to SQL expressions, i.e. after the SQL expressions input conversion.
//
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type QueryResponseSQLSchemas struct {
metav1.TypeMeta `json:",inline"`
// SchemaInfo for each requested query by refID
SQLSchemas SQLSchemas `json:"sqlSchemas"`
}
type SQLSchemas = map[string]SchemaInfo
// BasicColumn represents the column type for data that is input to a SQL expression.
type BasicColumn struct {
Name string `json:"name"`
MySQLType string `json:"mysqlType"`
Nullable bool `json:"nullable"`
// The DataFrameFieldType is the Grafana Plugin SDK data.FieldType that best represents this column.
// TODO: the OpenAPI schema thinks this is an integer, but data.FieldType is a uint8 alias.
// We need to somehow expose this as a string value, because the JSON marshaler will write it as a string.
DataFrameFieldType data.FieldType `json:"dataFrameFieldType"`
}
// SchemaInfo provides information and some sample data for data that could be an input
// to a SQL expression.
type SchemaInfo struct {
// +listType=atomic
Columns []BasicColumn `json:"columns"`
SampleRows SampleRows `json:"sampleRows"`
Error string `json:"error,omitempty"`
}
// There is a manual DeepCopy at the end of this file that will need to be updated when this or the
// underlying structs change. The hack script will also need to be run to update the Query service API
// generated types.
type SampleRows struct {
// +listType=atomic
values [][]any
}
func NewSampleRows(values [][]any) SampleRows {
return SampleRows{values: values}
}
func (u *SampleRows) Values() [][]any {
return u.values
}
func (u *SampleRows) MarshalJSON() ([]byte, error) {
return json.Marshal(u.values)
}
func (u *SampleRows) UnmarshalJSON(data []byte) error {
return json.Unmarshal(data, &u.values)
}
// Produce an API definition that represents [][]any
func (u SampleRows) OpenAPIDefinition() openapi.OpenAPIDefinition {
return openapi.OpenAPIDefinition{
Schema: *spec.ArrayProperty(spec.ArrayProperty( // Array of Array
&spec.Schema{
SchemaProps: spec.SchemaProps{ // no specific type for inner any
AdditionalProperties: &spec.SchemaOrBool{Allows: true},
},
},
)).WithDescription("[][]any"), // frontend says number | string | boolean | object
}
}
// DeepCopy returns a deep copy of the SampleRows.
func (in *SampleRows) DeepCopy() *SampleRows {
if in == nil {
return nil
}
out := NewSampleRows(deepCopySampleRows2D(in.Values()))
return &out
}
// Deep-copy [][]any preserving nil vs empty slices and cloning elements.
func deepCopySampleRows2D(in [][]any) [][]any {
if in == nil {
return nil
}
out := make([][]any, len(in))
for i, row := range in {
if row == nil {
// preserve nil inner slice
continue
}
newRow := make([]any, len(row))
for j, v := range row {
newRow[j] = deepCopyAny(v)
}
out[i] = newRow
}
return out
}
// Recursively clone pointers, maps, slices, arrays, and interfaces.
// Structs are copied by value (shallow for their internals).
func deepCopyAny(v any) any {
if v == nil {
return nil
}
return deepCopyRV(reflect.ValueOf(v)).Interface()
}
func deepCopyRV(rv reflect.Value) reflect.Value {
if !rv.IsValid() {
return rv
}
switch rv.Kind() {
case reflect.Pointer:
if rv.IsNil() {
return rv
}
elemCopy := deepCopyRV(rv.Elem())
newPtr := reflect.New(rv.Type().Elem())
if elemCopy.Type().AssignableTo(newPtr.Elem().Type()) {
newPtr.Elem().Set(elemCopy)
} else if elemCopy.Type().ConvertibleTo(newPtr.Elem().Type()) {
newPtr.Elem().Set(elemCopy.Convert(newPtr.Elem().Type()))
} else {
newPtr.Elem().Set(rv.Elem()) // fallback: shallow
}
return newPtr
case reflect.Interface:
if rv.IsNil() {
return rv
}
return deepCopyRV(rv.Elem())
case reflect.Map:
if rv.IsNil() {
return reflect.Zero(rv.Type())
}
newMap := reflect.MakeMapWithSize(rv.Type(), rv.Len())
for _, k := range rv.MapKeys() {
newMap.SetMapIndex(deepCopyRV(k), deepCopyRV(rv.MapIndex(k)))
}
return newMap
case reflect.Slice:
if rv.IsNil() {
return reflect.Zero(rv.Type())
}
n := rv.Len()
newSlice := reflect.MakeSlice(rv.Type(), n, n)
for i := 0; i < n; i++ {
newSlice.Index(i).Set(deepCopyRV(rv.Index(i)))
}
return newSlice
case reflect.Array:
newArr := reflect.New(rv.Type()).Elem()
for i := range rv.Len() {
newArr.Index(i).Set(deepCopyRV(rv.Index(i)))
}
return newArr
case reflect.Struct:
// Value copy (OK unless the struct contains references you also want deep-copied).
return rv
default:
// Scalars (string, bool, numbers), etc.
return rv
}
}
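
For context (not part of the diff): a minimal sketch of how the relocated types serialize, assuming it runs inside this v0alpha1 package and that encoding/json, fmt, metav1, and the plugin SDK data package are imported; the refID, column, and row values are made up. Because MarshalJSON is defined on *SampleRows, marshaling through a pointer keeps sampleRows as a bare [][]any:

rsp := QueryResponseSQLSchemas{
	TypeMeta: metav1.TypeMeta{APIVersion: "query.grafana.app/v0alpha1", Kind: "QueryResponseSQLSchemas"},
	SQLSchemas: SQLSchemas{
		"A": {
			Columns: []BasicColumn{
				{Name: "time", MySQLType: "datetime", Nullable: false, DataFrameFieldType: data.FieldTypeTime},
				{Name: "value", MySQLType: "double", Nullable: true, DataFrameFieldType: data.FieldTypeNullableFloat64},
			},
			SampleRows: NewSampleRows([][]any{{"2026-01-23T10:00:00Z", 42.5}}),
		},
	},
}
// Marshal via a pointer so the pointer-receiver MarshalJSON on SampleRows is picked up.
out, err := json.MarshalIndent(&rsp, "", "  ")
if err != nil {
	panic(err)
}
fmt.Println(string(out)) // "sampleRows" prints as [["2026-01-23T10:00:00Z", 42.5]], not a wrapped object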

View file

@ -11,6 +11,22 @@ import (
runtime "k8s.io/apimachinery/pkg/runtime"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *BasicColumn) DeepCopyInto(out *BasicColumn) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicColumn.
func (in *BasicColumn) DeepCopy() *BasicColumn {
if in == nil {
return nil
}
out := new(BasicColumn)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DataSourceApiServer) DeepCopyInto(out *DataSourceApiServer) {
*out = *in
@ -203,6 +219,38 @@ func (in *QueryDataResponse) DeepCopyObject() runtime.Object {
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *QueryResponseSQLSchemas) DeepCopyInto(out *QueryResponseSQLSchemas) {
*out = *in
out.TypeMeta = in.TypeMeta
if in.SQLSchemas != nil {
in, out := &in.SQLSchemas, &out.SQLSchemas
*out = make(map[string]SchemaInfo, len(*in))
for key, val := range *in {
(*out)[key] = *val.DeepCopy()
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryResponseSQLSchemas.
func (in *QueryResponseSQLSchemas) DeepCopy() *QueryResponseSQLSchemas {
if in == nil {
return nil
}
out := new(QueryResponseSQLSchemas)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *QueryResponseSQLSchemas) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *QueryTypeDefinition) DeepCopyInto(out *QueryTypeDefinition) {
*out = *in
@ -264,27 +312,30 @@ func (in *QueryTypeDefinitionList) DeepCopyObject() runtime.Object {
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SQLSchemas) DeepCopyInto(out *SQLSchemas) {
*out = *in
out.TypeMeta = in.TypeMeta
out.SQLSchemas = in.SQLSchemas.DeepCopy()
func (in *SampleRows) DeepCopyInto(out *SampleRows) {
clone := in.DeepCopy()
*out = *clone
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SQLSchemas.
func (in *SQLSchemas) DeepCopy() *SQLSchemas {
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SchemaInfo) DeepCopyInto(out *SchemaInfo) {
*out = *in
if in.Columns != nil {
in, out := &in.Columns, &out.Columns
*out = make([]BasicColumn, len(*in))
copy(*out, *in)
}
in.SampleRows.DeepCopyInto(&out.SampleRows)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SchemaInfo.
func (in *SchemaInfo) DeepCopy() *SchemaInfo {
if in == nil {
return nil
}
out := new(SQLSchemas)
out := new(SchemaInfo)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *SQLSchemas) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}

View file

@ -14,6 +14,7 @@ import (
func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenAPIDefinition {
return map[string]common.OpenAPIDefinition{
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.BasicColumn": schema_pkg_apis_query_v0alpha1_BasicColumn(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.DataSourceApiServer": schema_pkg_apis_query_v0alpha1_DataSourceApiServer(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.DataSourceApiServerList": schema_pkg_apis_query_v0alpha1_DataSourceApiServerList(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.DataSourceConnection": schema_pkg_apis_query_v0alpha1_DataSourceConnection(ref),
@ -21,9 +22,54 @@ func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenA
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.DataSourceConnectionRef": schema_pkg_apis_query_v0alpha1_DataSourceConnectionRef(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.QueryDataRequest": schema_pkg_apis_query_v0alpha1_QueryDataRequest(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.QueryDataResponse": schema_pkg_apis_query_v0alpha1_QueryDataResponse(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.QueryResponseSQLSchemas": schema_pkg_apis_query_v0alpha1_QueryResponseSQLSchemas(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.QueryTypeDefinition": schema_pkg_apis_query_v0alpha1_QueryTypeDefinition(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.QueryTypeDefinitionList": schema_pkg_apis_query_v0alpha1_QueryTypeDefinitionList(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.SQLSchemas": schema_pkg_apis_query_v0alpha1_SQLSchemas(ref),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.SampleRows": SampleRows{}.OpenAPIDefinition(),
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.SchemaInfo": schema_pkg_apis_query_v0alpha1_SchemaInfo(ref),
}
}
func schema_pkg_apis_query_v0alpha1_BasicColumn(ref common.ReferenceCallback) common.OpenAPIDefinition {
return common.OpenAPIDefinition{
Schema: spec.Schema{
SchemaProps: spec.SchemaProps{
Description: "BasicColumn represents the column type for data that is input to a SQL expression.",
Type: []string{"object"},
Properties: map[string]spec.Schema{
"name": {
SchemaProps: spec.SchemaProps{
Default: "",
Type: []string{"string"},
Format: "",
},
},
"mysqlType": {
SchemaProps: spec.SchemaProps{
Default: "",
Type: []string{"string"},
Format: "",
},
},
"nullable": {
SchemaProps: spec.SchemaProps{
Default: false,
Type: []string{"boolean"},
Format: "",
},
},
"dataFrameFieldType": {
SchemaProps: spec.SchemaProps{
Description: "The DataFrameFieldType is the Grafana Plugin SDK data.FieldType that best represents this column we need to somehow expose this as a string value because the JSONMarshaler will write it as a string",
Default: 0,
Type: []string{"integer"},
Format: "int32",
},
},
},
Required: []string{"name", "mysqlType", "nullable", "dataFrameFieldType"},
},
},
}
}
@ -395,6 +441,51 @@ func schema_pkg_apis_query_v0alpha1_QueryDataResponse(ref common.ReferenceCallba
}
}
func schema_pkg_apis_query_v0alpha1_QueryResponseSQLSchemas(ref common.ReferenceCallback) common.OpenAPIDefinition {
return common.OpenAPIDefinition{
Schema: spec.Schema{
SchemaProps: spec.SchemaProps{
Description: "SQLSchemas returns info about what the Schema for a DS query will be like if the query were to be used an input to SQL expressions. So effectively post SQL expressions input conversion.",
Type: []string{"object"},
Properties: map[string]spec.Schema{
"kind": {
SchemaProps: spec.SchemaProps{
Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds",
Type: []string{"string"},
Format: "",
},
},
"apiVersion": {
SchemaProps: spec.SchemaProps{
Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources",
Type: []string{"string"},
Format: "",
},
},
"sqlSchemas": {
SchemaProps: spec.SchemaProps{
Description: "SchemaInfo for each requested query by refID",
Type: []string{"object"},
AdditionalProperties: &spec.SchemaOrBool{
Allows: true,
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Default: map[string]interface{}{},
Ref: ref("github.com/grafana/grafana/pkg/apis/query/v0alpha1.SchemaInfo"),
},
},
},
},
},
},
Required: []string{"sqlSchemas"},
},
},
Dependencies: []string{
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.SchemaInfo"},
}
}
func schema_pkg_apis_query_v0alpha1_QueryTypeDefinition(ref common.ReferenceCallback) common.OpenAPIDefinition {
return common.OpenAPIDefinition{
Schema: spec.Schema{
@ -484,28 +575,47 @@ func schema_pkg_apis_query_v0alpha1_QueryTypeDefinitionList(ref common.Reference
}
}
func schema_pkg_apis_query_v0alpha1_SQLSchemas(ref common.ReferenceCallback) common.OpenAPIDefinition {
func schema_pkg_apis_query_v0alpha1_SchemaInfo(ref common.ReferenceCallback) common.OpenAPIDefinition {
return common.OpenAPIDefinition{
Schema: spec.Schema{
SchemaProps: spec.SchemaProps{
Type: []string{"object"},
Description: "SchemaInfo provides information and some sample data for data that could be an input to a SQL expression.",
Type: []string{"object"},
Properties: map[string]spec.Schema{
"kind": {
"columns": {
VendorExtensible: spec.VendorExtensible{
Extensions: spec.Extensions{
"x-kubernetes-list-type": "atomic",
},
},
SchemaProps: spec.SchemaProps{
Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds",
Type: []string{"string"},
Format: "",
Type: []string{"array"},
Items: &spec.SchemaOrArray{
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Default: map[string]interface{}{},
Ref: ref("github.com/grafana/grafana/pkg/apis/query/v0alpha1.BasicColumn"),
},
},
},
},
},
"apiVersion": {
"sampleRows": {
SchemaProps: spec.SchemaProps{
Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources",
Type: []string{"string"},
Format: "",
Ref: ref("github.com/grafana/grafana/pkg/apis/query/v0alpha1.SampleRows"),
},
},
"error": {
SchemaProps: spec.SchemaProps{
Type: []string{"string"},
Format: "",
},
},
},
Required: []string{"columns", "sampleRows"},
},
},
Dependencies: []string{
"github.com/grafana/grafana/pkg/apis/query/v0alpha1.BasicColumn", "github.com/grafana/grafana/pkg/apis/query/v0alpha1.SampleRows"},
}
}

View file

@ -1,3 +1,5 @@
API rule violation: list_type_missing,github.com/grafana/grafana/pkg/apis/query/v0alpha1,DataSourceApiServer,AliasIDs
API rule violation: names_match,github.com/grafana/grafana/pkg/apis/query/v0alpha1,BasicColumn,MySQLType
API rule violation: names_match,github.com/grafana/grafana/pkg/apis/query/v0alpha1,SampleRows,values
API rule violation: streaming_list_type_json_tags,github.com/grafana/grafana/pkg/apis/query/v0alpha1,DataSourceApiServerList,ListMeta
API rule violation: streaming_list_type_json_tags,github.com/grafana/grafana/pkg/apis/query/v0alpha1,DataSourceConnectionList,ListMeta

View file

@ -2,42 +2,17 @@ package expr
import (
"context"
"reflect"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
queryV0 "github.com/grafana/grafana/pkg/apis/query/v0alpha1"
"github.com/grafana/grafana/pkg/expr/mathexp"
"github.com/grafana/grafana/pkg/expr/sql"
)
// BasicColumn represents the column type for data that is input to a SQL expression.
type BasicColumn struct {
Name string `json:"name"`
MySQLType string `json:"mysqlType"`
Nullable bool `json:"nullable"`
DataFrameFieldType data.FieldType `json:"dataFrameFieldType"`
}
// SchemaInfo provides information and some sample data for data that could be an input
// to a SQL expression.
type SchemaInfo struct {
Columns []BasicColumn `json:"columns"`
SampleRows [][]any `json:"sampleRows"`
Error string `json:"error,omitempty"`
}
// SQLSchemas returns info about what the schema for a DS query would look like if the
// query were used as an input to SQL expressions, i.e. after the SQL expressions input conversion.
// There is a manual DeepCopy at the end of this file that will need to be updated when this or the
// underlying structs change. The hack script will also need to be run to update the Query service API
// generated types.
type SQLSchemas map[string]SchemaInfo
// GetSQLSchemas returns what the schemas are for SQL expressions for all DS queries
// in the request. It executes the queries to get the schemas.
// Intended use is for autocomplete and AI, so used during the authoring/editing experience only.
func (s *Service) GetSQLSchemas(ctx context.Context, req Request) (SQLSchemas, error) {
func (s *Service) GetSQLSchemas(ctx context.Context, req Request) (queryV0.SQLSchemas, error) {
// Extract DS Nodes and Execute Them
// Building the pipeline is maybe not best, as it can have more errors.
filtered := make([]Query, 0, len(req.Queries))
@ -52,7 +27,7 @@ func (s *Service) GetSQLSchemas(ctx context.Context, req Request) (SQLSchemas, e
return nil, err
}
var schemas = make(SQLSchemas)
var schemas = make(queryV0.SQLSchemas)
for _, node := range pipeline {
// For now, execute calls convert at the end, so we are being lazy and running the full conversion. Longer run we want to run without
@ -64,27 +39,27 @@ func (s *Service) GetSQLSchemas(ctx context.Context, req Request) (SQLSchemas, e
// TODO: check where time is coming from, don't recall
res, err := dsNode.Execute(ctx, time.Now(), mathexp.Vars{}, s)
if err != nil {
schemas[dsNode.RefID()] = SchemaInfo{Error: err.Error()}
schemas[dsNode.RefID()] = queryV0.SchemaInfo{Error: err.Error()}
continue
// we want to continue and get the schemas we can
}
if res.Error != nil {
schemas[dsNode.RefID()] = SchemaInfo{Error: res.Error.Error()}
schemas[dsNode.RefID()] = queryV0.SchemaInfo{Error: res.Error.Error()}
continue
// we want to continue and get the schemas we can
}
frames := res.Values.AsDataFrames(dsNode.RefID())
if len(frames) == 0 {
schemas[dsNode.RefID()] = SchemaInfo{Error: "no data"}
schemas[dsNode.RefID()] = queryV0.SchemaInfo{Error: "no data"}
continue // avoid indexing frames[0] when there is no data
}
frame := frames[0]
schema := sql.SchemaFromFrame(frame)
columns := make([]BasicColumn, 0, len(schema))
columns := make([]queryV0.BasicColumn, 0, len(schema))
for _, col := range schema {
fT, _ := sql.MySQLColToFieldType(col)
columns = append(columns, BasicColumn{
columns = append(columns, queryV0.BasicColumn{
Name: col.Name,
MySQLType: col.Type.String(),
Nullable: col.Nullable,
@ -103,129 +78,11 @@ func (s *Service) GetSQLSchemas(ctx context.Context, req Request) (SQLSchemas, e
sampleRows = append(sampleRows, frame.RowCopy(i))
}
schemas[dsNode.RefID()] = SchemaInfo{Columns: columns, SampleRows: sampleRows}
schemas[dsNode.RefID()] = queryV0.SchemaInfo{
Columns: columns,
SampleRows: queryV0.NewSampleRows(sampleRows),
}
}
return schemas, nil
}
// DeepCopy returns a deep copy of the schema.
// Used AI to make it, the kubernetes one doesn't like any or interface{}
func (s SQLSchemas) DeepCopy() SQLSchemas {
if s == nil {
return nil
}
out := make(SQLSchemas, len(s))
for k, v := range s {
out[k] = SchemaInfo{
Columns: copyColumns(v.Columns),
SampleRows: deepCopySampleRows2D(v.SampleRows),
Error: v.Error,
}
}
return out
}
func copyColumns(in []BasicColumn) []BasicColumn {
if in == nil {
return nil
}
out := make([]BasicColumn, len(in))
copy(out, in) // BasicColumn is value-only, so this suffices
return out
}
// Deep-copy [][]any preserving nil vs empty slices and cloning elements.
func deepCopySampleRows2D(in [][]any) [][]any {
if in == nil {
return nil
}
out := make([][]any, len(in))
for i, row := range in {
if row == nil {
// preserve nil inner slice
continue
}
newRow := make([]any, len(row))
for j, v := range row {
newRow[j] = deepCopyAny(v)
}
out[i] = newRow
}
return out
}
// Recursively clone pointers, maps, slices, arrays, and interfaces.
// Structs are copied by value (shallow for their internals).
func deepCopyAny(v any) any {
if v == nil {
return nil
}
return deepCopyRV(reflect.ValueOf(v)).Interface()
}
func deepCopyRV(rv reflect.Value) reflect.Value {
if !rv.IsValid() {
return rv
}
switch rv.Kind() {
case reflect.Ptr:
if rv.IsNil() {
return rv
}
elemCopy := deepCopyRV(rv.Elem())
newPtr := reflect.New(rv.Type().Elem())
if elemCopy.Type().AssignableTo(newPtr.Elem().Type()) {
newPtr.Elem().Set(elemCopy)
} else if elemCopy.Type().ConvertibleTo(newPtr.Elem().Type()) {
newPtr.Elem().Set(elemCopy.Convert(newPtr.Elem().Type()))
} else {
newPtr.Elem().Set(rv.Elem()) // fallback: shallow
}
return newPtr
case reflect.Interface:
if rv.IsNil() {
return rv
}
return deepCopyRV(rv.Elem())
case reflect.Map:
if rv.IsNil() {
return reflect.Zero(rv.Type())
}
newMap := reflect.MakeMapWithSize(rv.Type(), rv.Len())
for _, k := range rv.MapKeys() {
newMap.SetMapIndex(deepCopyRV(k), deepCopyRV(rv.MapIndex(k)))
}
return newMap
case reflect.Slice:
if rv.IsNil() {
return reflect.Zero(rv.Type())
}
n := rv.Len()
newSlice := reflect.MakeSlice(rv.Type(), n, n)
for i := 0; i < n; i++ {
newSlice.Index(i).Set(deepCopyRV(rv.Index(i)))
}
return newSlice
case reflect.Array:
n := rv.Len()
newArr := reflect.New(rv.Type()).Elem()
for i := 0; i < n; i++ {
newArr.Index(i).Set(deepCopyRV(rv.Index(i)))
}
return newArr
case reflect.Struct:
// Value copy (OK unless the struct contains references you also want deep-copied).
return rv
default:
// Scalars (string, bool, numbers), etc.
return rv
}
}
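
A hedged sketch of the call-site view after the move: GetSQLSchemas now hands back queryV0.SQLSchemas (a map[string]queryV0.SchemaInfo), so callers range over it by refID. The names exprService, req, ctx, and logger below are placeholders, not identifiers from this commit:

schemas, err := exprService.GetSQLSchemas(ctx, req)
if err != nil {
	return err
}
for refID, info := range schemas {
	if info.Error != "" {
		logger.Warn("could not build SQL schema", "refId", refID, "error", info.Error)
		continue
	}
	for _, col := range info.Columns {
		logger.Debug("column", "refId", refID, "name", col.Name, "mysqlType", col.MySQLType, "nullable", col.Nullable)
	}
	// SampleRows now hides its [][]any behind an accessor instead of an exported field.
	logger.Debug("sample rows", "refId", refID, "count", len(info.SampleRows.Values()))
}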

View file

@ -3,6 +3,7 @@ package query
import (
"context"
"encoding/json"
"fmt"
"runtime"
"github.com/prometheus/client_golang/prometheus"
@ -164,7 +165,7 @@ func addKnownTypes(scheme *apiruntime.Scheme, gv schema.GroupVersion) {
&query.QueryDataResponse{},
&query.QueryTypeDefinition{},
&query.QueryTypeDefinitionList{},
&query.SQLSchemas{},
&query.QueryResponseSQLSchemas{},
)
}
@ -205,8 +206,6 @@ func (b *QueryAPIBuilder) UpdateAPIGroupInfo(apiGroupInfo *genericapiserver.APIG
// The query endpoint -- NOTE, this uses a rewrite hack to allow requests without a name parameter
storage["query"] = newQueryREST(b)
storage["sqlschemas"] = newSQLSchemasREST(b)
// Register the expressions query schemas
err := queryschema.RegisterQueryTypes(b.queryTypes, storage)
@ -304,5 +303,15 @@ func (b *QueryAPIBuilder) PostProcessOpenAPI(oas *spec3.OpenAPI) (*spec3.OpenAPI
return oas, nil
}
// Reuse the query endpoint's request body schema for sqlschemas
query, ok := oas.Paths.Paths[root+"namespaces/{namespace}/query"]
if !ok || query.Post == nil || query.Post.RequestBody == nil {
return nil, fmt.Errorf("could not find query path")
}
sqlschemas, ok := oas.Paths.Paths[root+"namespaces/{namespace}/sqlschemas"]
if ok && sqlschemas.Post != nil {
sqlschemas.Post.RequestBody = query.Post.RequestBody
}
return oas, nil
}

View file

@ -0,0 +1,60 @@
package query
import (
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/kube-openapi/pkg/spec3"
"k8s.io/kube-openapi/pkg/validation/spec"
"github.com/grafana/grafana/pkg/services/apiserver/builder"
)
func (b *QueryAPIBuilder) GetAPIRoutes(gv schema.GroupVersion) *builder.APIRoutes {
defs := b.GetOpenAPIDefinitions()(func(path string) spec.Ref { return spec.Ref{} })
sqlSchemas := defs["github.com/grafana/grafana/pkg/apis/query/v0alpha1.QueryResponseSQLSchemas"].Schema
return &builder.APIRoutes{
Namespace: []builder.APIRouteHandler{
{
Path: "sqlschemas",
Spec: &spec3.PathProps{
Post: &spec3.Operation{
OperationProps: spec3.OperationProps{
Tags: []string{"Query SQL Schemas"},
OperationId: "querySqlSchemas",
Description: "Get a SQL Schema for a set of queries",
Parameters: []*spec3.Parameter{
{
ParameterProps: spec3.ParameterProps{
Name: "namespace",
In: "path",
Required: true,
Example: "default",
Description: "workspace",
Schema: spec.StringProperty(),
},
},
},
Responses: &spec3.Responses{
ResponsesProps: spec3.ResponsesProps{
StatusCodeResponses: map[int]*spec3.Response{
200: {
ResponseProps: spec3.ResponseProps{
Content: map[string]*spec3.MediaType{
"application/json": {
MediaTypeProps: spec3.MediaTypeProps{
Schema: &sqlSchemas,
},
},
},
},
},
},
},
},
},
},
},
Handler: b.GetSQLSchemas,
},
},
}
}
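
Roughly how a client would exercise the route registered above; the base URL, auth, client variables, and the query payload are assumptions, while the path follows the pattern used elsewhere in this PR (/apis/query.grafana.app/v0alpha1/namespaces/{namespace}/sqlschemas):

body := strings.NewReader(`{"queries":[{"refId":"A","datasource":{"uid":"my-datasource-uid"}}]}`)
req, err := http.NewRequest(http.MethodPost,
	grafanaURL+"/apis/query.grafana.app/v0alpha1/namespaces/default/sqlschemas", body)
if err != nil {
	return err
}
req.Header.Set("Content-Type", "application/json")
resp, err := httpClient.Do(req)
if err != nil {
	return err
}
defer func() { _ = resp.Body.Close() }()
var schemas queryV0.QueryResponseSQLSchemas
if err := json.NewDecoder(resp.Body).Decode(&schemas); err != nil {
	return err
}
// schemas.SQLSchemas["A"] now holds the columns and sample rows as SQL expressions would see them.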

View file

@ -2,152 +2,64 @@ package query
import (
"context"
"encoding/json"
"net/http"
"strconv"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/expr"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/codes"
"github.com/gorilla/mux"
errorsK8s "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/rest"
query "github.com/grafana/grafana/pkg/apis/query/v0alpha1"
"github.com/grafana/grafana/pkg/infra/log"
service "github.com/grafana/grafana/pkg/services/query"
"github.com/grafana/grafana/pkg/util/errhttp"
"github.com/grafana/grafana/pkg/web"
)
type sqlSchemaREST struct {
logger log.Logger
builder *QueryAPIBuilder
}
var (
_ rest.Storage = (*sqlSchemaREST)(nil)
_ rest.SingularNameProvider = (*sqlSchemaREST)(nil)
_ rest.Connecter = (*sqlSchemaREST)(nil)
_ rest.Scoper = (*sqlSchemaREST)(nil)
_ rest.StorageMetadata = (*sqlSchemaREST)(nil)
)
func newSQLSchemasREST(builder *QueryAPIBuilder) *sqlSchemaREST {
return &sqlSchemaREST{
logger: log.New("query.sqlschemas"),
builder: builder,
}
}
func (r *sqlSchemaREST) New() runtime.Object {
// This is added as the "ResponseType" regardless what ProducesObject() says :)
return &query.SQLSchemas{}
}
func (r *sqlSchemaREST) Destroy() {}
func (r *sqlSchemaREST) NamespaceScoped() bool {
return true
}
func (r *sqlSchemaREST) GetSingularName() string {
return "SQLSchema" // Used for the
}
func (r *sqlSchemaREST) ProducesMIMETypes(verb string) []string {
return []string{"application/json"} // and parquet!
}
func (r *sqlSchemaREST) ProducesObject(verb string) interface{} {
return &query.SQLSchemas{}
}
func (r *sqlSchemaREST) ConnectMethods() []string {
return []string{"POST"}
}
func (r *sqlSchemaREST) NewConnectOptions() (runtime.Object, bool, string) {
return nil, false, "" // true means you can use the trailing path as a variable
}
// called by mt query service and also when queryServiceFromUI is enabled, can be both mt and st
func (r *sqlSchemaREST) Connect(connectCtx context.Context, name string, _ runtime.Object, incomingResponder rest.Responder) (http.Handler, error) {
// See: /pkg/services/apiserver/builder/helper.go#L34
// The name is set with a rewriter hack
if name != "name" {
r.logger.Debug("Connect name is not name")
return nil, errorsK8s.NewNotFound(schema.GroupResource{}, name)
func (b *QueryAPIBuilder) GetSQLSchemas(w http.ResponseWriter, r *http.Request) {
ctx, span := b.tracer.Start(r.Context(), "QueryService.GetSQLSchemas")
defer span.End()
ns := mux.Vars(r)["namespace"]
ctx = request.WithNamespace(ctx, ns)
traceId := span.SpanContext().TraceID()
connectLogger := b.log.New("traceId", traceId.String(), "rule_uid", r.Header.Get("X-Rule-Uid"))
raw := &query.QueryDataRequest{}
err := web.Bind(r, raw)
if err != nil {
connectLogger.Error("Hit unexpected error when reading query", "err", err)
err = errorsK8s.NewBadRequest("error reading query")
errhttp.Write(ctx, err, w)
return
}
b := r.builder
return http.HandlerFunc(func(w http.ResponseWriter, httpreq *http.Request) {
ctx, span := b.tracer.Start(httpreq.Context(), "QueryService.GetSQLSchemas")
defer span.End()
ctx = request.WithNamespace(ctx, request.NamespaceValue(connectCtx))
traceId := span.SpanContext().TraceID()
connectLogger := b.log.New("traceId", traceId.String(), "rule_uid", httpreq.Header.Get("X-Rule-Uid"))
responder := newResponderWrapper(incomingResponder,
func(statusCode *int, obj runtime.Object) {
if *statusCode/100 == 4 {
span.SetStatus(codes.Error, strconv.Itoa(*statusCode))
}
qdr, err := handleSQLSchemaQuery(ctx, *raw, *b, r, connectLogger)
if err != nil {
errhttp.Write(ctx, err, w)
return
}
if *statusCode >= 500 {
o, ok := obj.(*query.QueryDataResponse)
if ok && o.Responses != nil {
for refId, response := range o.Responses {
if response.ErrorSource == backend.ErrorSourceDownstream {
*statusCode = http.StatusBadRequest //force this to be a 400 since it's downstream
span.SetStatus(codes.Error, strconv.Itoa(*statusCode))
span.SetAttributes(attribute.String("error.source", "downstream"))
break
} else if response.Error != nil {
connectLogger.Debug("500 error without downstream error source", "error", response.Error, "errorSource", response.ErrorSource, "refId", refId)
span.SetStatus(codes.Error, "500 error without downstream error source")
} else {
span.SetStatus(codes.Error, "500 error without downstream error source and no Error message")
span.SetAttributes(attribute.String("error.ref_id", refId))
}
}
}
}
},
func(err error) {
connectLogger.Error("error caught in handler", "err", err)
span.SetStatus(codes.Error, "query error")
if err == nil {
return
}
span.RecordError(err)
})
raw := &query.QueryDataRequest{}
err := web.Bind(httpreq, raw)
if err != nil {
connectLogger.Error("Hit unexpected error when reading query", "err", err)
err = errorsK8s.NewBadRequest("error reading query")
responder.Error(err)
return
}
qdr, err := handleSQLSchemaQuery(ctx, *raw, *b, httpreq, *responder, connectLogger)
if err != nil {
responder.Error(err)
return
}
responder.Object(200, &query.SQLSchemas{
SQLSchemas: qdr,
})
}), nil
// Write the response
w.Header().Set("Content-Type", "application/json")
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ") // pretty print
err = encoder.Encode(&query.QueryResponseSQLSchemas{
TypeMeta: v1.TypeMeta{
APIVersion: query.SchemeGroupVersion.String(),
Kind: "QueryResponseSQLSchemas",
},
SQLSchemas: qdr,
})
if err != nil {
errhttp.Write(ctx, err, w)
}
}
func handlePreparedSQLSchema(ctx context.Context, pq *preparedQuery) (expr.SQLSchemas, error) {
func handlePreparedSQLSchema(ctx context.Context, pq *preparedQuery) (query.SQLSchemas, error) {
resp, err := service.GetSQLSchemas(ctx, pq.logger, pq.cache, pq.exprSvc, pq.mReq, pq.builder, pq.headers)
pq.reportMetrics()
return resp, err
@ -158,12 +70,10 @@ func handleSQLSchemaQuery(
raw query.QueryDataRequest,
b QueryAPIBuilder,
httpreq *http.Request,
responder responderWrapper,
connectLogger log.Logger,
) (expr.SQLSchemas, error) {
) (query.SQLSchemas, error) {
pq, err := prepareQuery(ctx, raw, b, httpreq, connectLogger)
if err != nil {
responder.Error(err)
return nil, err
}
return handlePreparedSQLSchema(ctx, pq)

View file

@ -56,12 +56,6 @@ var PathRewriters = []filters.PathRewriter{
return matches[1] + "/name" // connector requires a name
},
},
{
Pattern: regexp.MustCompile(`(/apis/query.grafana.app/v0alpha1/namespaces/.*/sqlschemas$)`),
ReplaceFunc: func(matches []string) string {
return matches[1] + "/name" // connector requires a name
},
},
{
Pattern: regexp.MustCompile(`(/apis/.*/v0alpha1/namespaces/.*/queryconvert$)`),
ReplaceFunc: func(matches []string) string {

View file

@ -6,18 +6,14 @@ import (
context "context"
"fmt"
backend "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/grafana/grafana/pkg/expr"
dashboards "github.com/grafana/grafana/pkg/services/dashboards"
dtos "github.com/grafana/grafana/pkg/api/dtos"
mock "github.com/stretchr/testify/mock"
backend "github.com/grafana/grafana-plugin-sdk-go/backend"
dtos "github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/apimachinery/identity"
queryV0 "github.com/grafana/grafana/pkg/apis/query/v0alpha1"
dashboards "github.com/grafana/grafana/pkg/services/dashboards"
models "github.com/grafana/grafana/pkg/services/publicdashboards/models"
user "github.com/grafana/grafana/pkg/services/user"
)
@ -590,7 +586,7 @@ func (_m *FakePublicDashboardService) Update(ctx context.Context, u *user.Signed
return r0, r1
}
func (_m *FakePublicDashboardService) GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (expr.SQLSchemas, error) {
func (_m *FakePublicDashboardService) GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (queryV0.SQLSchemas, error) {
return nil, fmt.Errorf("not implemented in public dashboards")
}

View file

@ -6,7 +6,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/grafana/grafana/pkg/expr"
queryV0 "github.com/grafana/grafana/pkg/apis/query/v0alpha1"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
"github.com/grafana/grafana/pkg/services/dashboards"
. "github.com/grafana/grafana/pkg/services/publicdashboards/models"
@ -40,7 +40,7 @@ type Service interface {
ExistsEnabledByAccessToken(ctx context.Context, accessToken string) (bool, error)
ExistsEnabledByDashboardUid(ctx context.Context, dashboardUid string) (bool, error)
GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (expr.SQLSchemas, error)
GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (queryV0.SQLSchemas, error)
}
// ServiceWrapper these methods have different behavior between OSS and Enterprise. The latter would call the OSS service first

View file

@ -8,13 +8,13 @@ import (
"time"
"github.com/google/uuid"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"go.opentelemetry.io/otel"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/grafana/grafana/pkg/expr"
queryV0 "github.com/grafana/grafana/pkg/apis/query/v0alpha1"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/metrics"
"github.com/grafana/grafana/pkg/services/accesscontrol"
@ -513,7 +513,7 @@ func (pd *PublicDashboardServiceImpl) logIsEnabledChanged(existingPubdash *Publi
}
}
func (pd *PublicDashboardServiceImpl) GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (expr.SQLSchemas, error) {
func (pd *PublicDashboardServiceImpl) GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (queryV0.SQLSchemas, error) {
return nil, fmt.Errorf("sql schema endpoint not supported with public dashboards")
}

View file

@ -6,6 +6,7 @@ import (
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/apimachinery/errutil"
"github.com/grafana/grafana/pkg/apimachinery/identity"
queryV0 "github.com/grafana/grafana/pkg/apis/query/v0alpha1"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/datasources"
@ -13,12 +14,12 @@ import (
"github.com/grafana/grafana/pkg/services/validations"
)
func (s *ServiceImpl) GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (expr.SQLSchemas, error) {
func (s *ServiceImpl) GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (queryV0.SQLSchemas, error) {
// TODO: dedupe this code
parsedReq, err := s.parseMetricRequest(ctx, user, false, reqDTO, false)
if err != nil {
return expr.SQLSchemas{}, err
return queryV0.SQLSchemas{}, err
}
exprReq := expr.Request{
Queries: []expr.Query{},
@ -55,7 +56,7 @@ func (s *ServiceImpl) GetSQLSchemas(ctx context.Context, user identity.Requester
return s.expressionService.GetSQLSchemas(ctx, exprReq)
}
func GetSQLSchemas(ctx context.Context, log log.Logger, dscache datasources.CacheService, exprService *expr.Service, reqDTO dtos.MetricRequest, qsDatasourceClientBuilder dsquerierclient.QSDatasourceClientBuilder, headers map[string]string) (expr.SQLSchemas, error) {
func GetSQLSchemas(ctx context.Context, log log.Logger, dscache datasources.CacheService, exprService *expr.Service, reqDTO dtos.MetricRequest, qsDatasourceClientBuilder dsquerierclient.QSDatasourceClientBuilder, headers map[string]string) (queryV0.SQLSchemas, error) {
s := &ServiceImpl{
log: log,
dataSourceCache: dscache,

View file

@ -9,13 +9,14 @@ import (
"slices"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"golang.org/x/sync/errgroup"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/apimachinery/errutil"
"github.com/grafana/grafana/pkg/apimachinery/identity"
queryV0 "github.com/grafana/grafana/pkg/apis/query/v0alpha1"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/log"
@ -74,7 +75,7 @@ type Service interface {
// this is more "forward compatible", for example supports per-query time ranges
QueryDataNew(ctx context.Context, user identity.Requester, skipDSCache bool, reqDTO dtos.MetricRequest) (*backend.QueryDataResponse, error)
GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (expr.SQLSchemas, error)
GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (queryV0.SQLSchemas, error)
}
// Gives us compile time error if the service does not adhere to the contract of the interface

View file

@ -6,14 +6,12 @@ import (
context "context"
"fmt"
backend "github.com/grafana/grafana-plugin-sdk-go/backend"
dtos "github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/expr"
identity "github.com/grafana/grafana/pkg/apimachinery/identity"
mock "github.com/stretchr/testify/mock"
backend "github.com/grafana/grafana-plugin-sdk-go/backend"
dtos "github.com/grafana/grafana/pkg/api/dtos"
identity "github.com/grafana/grafana/pkg/apimachinery/identity"
queryV0 "github.com/grafana/grafana/pkg/apis/query/v0alpha1"
)
// FakeQueryService is an autogenerated mock type for the Service type
@ -99,7 +97,7 @@ func (_m *FakeQueryService) Run(ctx context.Context) error {
return r0
}
func (_m *FakeQueryService) GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (expr.SQLSchemas, error) {
func (_m *FakeQueryService) GetSQLSchemas(ctx context.Context, user identity.Requester, reqDTO dtos.MetricRequest) (queryV0.SQLSchemas, error) {
return nil, fmt.Errorf("sql schema endpoint not supported with public dashboards")
}