Prometheus: Update FrameType and make __name__ the field name (#62694)

These changes would make the response more in line with the dataplane contract; the changes are under the feature toggle prometheusDataplane
This commit is contained in:
Kyle Brandt 2023-03-29 11:26:32 -04:00 committed by GitHub
parent 845951485f
commit 674144c8e8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 94 additions and 52 deletions

View File

@ -54,50 +54,51 @@ Some stable features are enabled by default. You can disable a stable feature by
These features are early in their development lifecycle and so are not yet supported in Grafana Cloud. These features are early in their development lifecycle and so are not yet supported in Grafana Cloud.
Alpha features might be changed or removed without prior notice. Alpha features might be changed or removed without prior notice.
| Feature toggle name | Description | | Feature toggle name | Description |
| ---------------------------------- | --------------------------------------------------------------------------------------------------------- | | ---------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `alertingBigTransactions` | Use big transactions for alerting database writes | | `alertingBigTransactions` | Use big transactions for alerting database writes |
| `dashboardPreviews` | Create and show thumbnails for dashboard search results | | `dashboardPreviews` | Create and show thumbnails for dashboard search results |
| `live-service-web-worker` | This will use a webworker thread to process events rather than the main thread | | `live-service-web-worker` | This will use a webworker thread to process events rather than the main thread |
| `queryOverLive` | Use Grafana Live WebSocket to execute backend queries | | `queryOverLive` | Use Grafana Live WebSocket to execute backend queries |
| `publicDashboards` | Enables public access to dashboards | | `publicDashboards` | Enables public access to dashboards |
| `publicDashboardsEmailSharing` | Enables public dashboard sharing to be restricted to only allowed emails | | `publicDashboardsEmailSharing` | Enables public dashboard sharing to be restricted to only allowed emails |
| `lokiLive` | Support WebSocket streaming for loki (early prototype) | | `lokiLive` | Support WebSocket streaming for loki (early prototype) |
| `lokiDataframeApi` | Use experimental loki api for WebSocket streaming (early prototype) | | `lokiDataframeApi` | Use experimental loki api for WebSocket streaming (early prototype) |
| `storage` | Configurable storage for dashboards, datasources, and resources | | `storage` | Configurable storage for dashboards, datasources, and resources |
| `exploreMixedDatasource` | Enable mixed datasource in Explore | | `exploreMixedDatasource` | Enable mixed datasource in Explore |
| `newTraceView` | Shows the new trace view design | | `newTraceView` | Shows the new trace view design |
| `correlations` | Correlations page | | `correlations` | Correlations page |
| `datasourceQueryMultiStatus` | Introduce HTTP 207 Multi Status for api/ds/query | | `datasourceQueryMultiStatus` | Introduce HTTP 207 Multi Status for api/ds/query |
| `traceToMetrics` | Enable trace to metrics links | | `traceToMetrics` | Enable trace to metrics links |
| `prometheusWideSeries` | Enable wide series responses in the Prometheus datasource | | `prometheusWideSeries` | Enable wide series responses in the Prometheus datasource |
| `canvasPanelNesting` | Allow elements nesting | | `canvasPanelNesting` | Allow elements nesting |
| `scenes` | Experimental framework to build interactive dashboards | | `scenes` | Experimental framework to build interactive dashboards |
| `disableSecretsCompatibility` | Disable duplicated secret storage in legacy tables | | `disableSecretsCompatibility` | Disable duplicated secret storage in legacy tables |
| `logRequestsInstrumentedAsUnknown` | Logs the path for requests that are instrumented as unknown | | `logRequestsInstrumentedAsUnknown` | Logs the path for requests that are instrumented as unknown |
| `redshiftAsyncQueryDataSupport` | Enable async query data support for Redshift | | `redshiftAsyncQueryDataSupport` | Enable async query data support for Redshift |
| `athenaAsyncQueryDataSupport` | Enable async query data support for Athena | | `athenaAsyncQueryDataSupport` | Enable async query data support for Athena |
| `newPanelChromeUI` | Show updated look and feel of grafana-ui PanelChrome: panel header, icons, and menu | | `newPanelChromeUI` | Show updated look and feel of grafana-ui PanelChrome: panel header, icons, and menu |
| `showDashboardValidationWarnings` | Show warnings when dashboards do not validate against the schema | | `showDashboardValidationWarnings` | Show warnings when dashboards do not validate against the schema |
| `mysqlAnsiQuotes` | Use double quotes to escape keyword in a MySQL query | | `mysqlAnsiQuotes` | Use double quotes to escape keyword in a MySQL query |
| `elasticsearchBackendMigration` | Use Elasticsearch as backend data source | | `elasticsearchBackendMigration` | Use Elasticsearch as backend data source |
| `datasourceOnboarding` | Enable data source onboarding page | | `datasourceOnboarding` | Enable data source onboarding page |
| `emptyDashboardPage` | Enable the redesigned user interface of a dashboard page that includes no panels | | `emptyDashboardPage` | Enable the redesigned user interface of a dashboard page that includes no panels |
| `secureSocksDatasourceProxy` | Enable secure socks tunneling for supported core datasources | | `secureSocksDatasourceProxy` | Enable secure socks tunneling for supported core datasources |
| `authnService` | Use new auth service to perform authentication | | `authnService` | Use new auth service to perform authentication |
| `alertingBacktesting` | Rule backtesting API for alerting | | `alertingBacktesting` | Rule backtesting API for alerting |
| `editPanelCSVDragAndDrop` | Enables drag and drop for CSV and Excel files | | `editPanelCSVDragAndDrop` | Enables drag and drop for CSV and Excel files |
| `logsContextDatasourceUi` | Allow datasource to provide custom UI for context view | | `logsContextDatasourceUi` | Allow datasource to provide custom UI for context view |
| `lokiQuerySplitting` | Split large interval queries into subqueries with smaller time intervals | | `lokiQuerySplitting` | Split large interval queries into subqueries with smaller time intervals |
| `lokiQuerySplittingConfig` | Give users the option to configure split durations for Loki queries | | `lokiQuerySplittingConfig` | Give users the option to configure split durations for Loki queries |
| `individualCookiePreferences` | Support overriding cookie preferences per user | | `individualCookiePreferences` | Support overriding cookie preferences per user |
| `onlyExternalOrgRoleSync` | Prohibits a user from changing organization roles synced with external auth providers | | `onlyExternalOrgRoleSync` | Prohibits a user from changing organization roles synced with external auth providers |
| `drawerDataSourcePicker` | Changes the user experience for data source selection to a drawer. | | `drawerDataSourcePicker` | Changes the user experience for data source selection to a drawer. |
| `traceqlSearch` | Enables the 'TraceQL Search' tab for the Tempo datasource which provides a UI to generate TraceQL queries | | `traceqlSearch` | Enables the 'TraceQL Search' tab for the Tempo datasource which provides a UI to generate TraceQL queries |
| `prometheusMetricEncyclopedia` | Replaces the Prometheus query builder metric select option with a paginated and filterable component | | `prometheusMetricEncyclopedia` | Replaces the Prometheus query builder metric select option with a paginated and filterable component |
| `timeSeriesTable` | Enable time series table transformer & sparkline cell type | | `timeSeriesTable` | Enable time series table transformer & sparkline cell type |
| `influxdbBackendMigration` | Query InfluxDB InfluxQL without the proxy | | `influxdbBackendMigration` | Query InfluxDB InfluxQL without the proxy |
| `clientTokenRotation` | Replaces the current in-request token rotation so that the client initiates the rotation | | `clientTokenRotation` | Replaces the current in-request token rotation so that the client initiates the rotation |
| `prometheusDataplane` | Changes responses from Prometheus to be compliant with the dataplane specification. In particular it sets the numeric Field.Name from 'Value' to the value of the `__name__` label when present. |
## Development feature toggles ## Development feature toggles

View File

@ -86,4 +86,5 @@ export interface FeatureToggles {
influxdbBackendMigration?: boolean; influxdbBackendMigration?: boolean;
clientTokenRotation?: boolean; clientTokenRotation?: boolean;
disableElasticsearchBackendExploreQuery?: boolean; disableElasticsearchBackendExploreQuery?: boolean;
prometheusDataplane?: boolean;
} }

View File

@ -454,5 +454,11 @@ var (
State: FeatureStateBeta, State: FeatureStateBeta,
Owner: grafanaObservabilityLogsSquad, Owner: grafanaObservabilityLogsSquad,
}, },
{
Name: "prometheusDataplane",
Description: "Changes responses from Prometheus to be compliant with the dataplane specification. In particular it sets the numeric Field.Name from 'Value' to the value of the `__name__` label when present.",
State: FeatureStateAlpha,
Owner: grafanaObservabilityMetricsSquad,
},
} }
) )

View File

@ -67,3 +67,4 @@ timeSeriesTable,alpha,@grafana/app-o11y,false,false,false,true
influxdbBackendMigration,alpha,@grafana/observability-metrics,false,false,false,true influxdbBackendMigration,alpha,@grafana/observability-metrics,false,false,false,true
clientTokenRotation,alpha,@grafana/grafana-authnz-team,false,false,false,false clientTokenRotation,alpha,@grafana/grafana-authnz-team,false,false,false,false
disableElasticsearchBackendExploreQuery,beta,@grafana/observability-logs,false,false,false,false disableElasticsearchBackendExploreQuery,beta,@grafana/observability-logs,false,false,false,false
prometheusDataplane,alpha,@grafana/observability-metrics,false,false,false,false

1 Name State Owner requiresDevMode RequiresLicense RequiresRestart FrontendOnly
67 influxdbBackendMigration alpha @grafana/observability-metrics false false false true
68 clientTokenRotation alpha @grafana/grafana-authnz-team false false false false
69 disableElasticsearchBackendExploreQuery beta @grafana/observability-logs false false false false
70 prometheusDataplane alpha @grafana/observability-metrics false false false false

View File

@ -278,4 +278,8 @@ const (
// FlagDisableElasticsearchBackendExploreQuery // FlagDisableElasticsearchBackendExploreQuery
// Disable executing of Elasticsearch Explore queries through backend // Disable executing of Elasticsearch Explore queries through backend
FlagDisableElasticsearchBackendExploreQuery = "disableElasticsearchBackendExploreQuery" FlagDisableElasticsearchBackendExploreQuery = "disableElasticsearchBackendExploreQuery"
// FlagPrometheusDataplane
// Changes responses from Prometheus to be compliant with the dataplane specification. In particular it sets the numeric Field.Name from 'Value' to the value of the `__name__` label when present.
FlagPrometheusDataplane = "prometheusDataplane"
) )

View File

@ -43,6 +43,7 @@ type QueryData struct {
URL string URL string
TimeInterval string TimeInterval string
enableWideSeries bool enableWideSeries bool
enableDataplane bool
exemplarSampler func() exemplar.Sampler exemplarSampler func() exemplar.Sampler
} }
@ -82,6 +83,7 @@ func New(
ID: settings.ID, ID: settings.ID,
URL: settings.URL, URL: settings.URL,
enableWideSeries: features.IsEnabled(featuremgmt.FlagPrometheusWideSeries), enableWideSeries: features.IsEnabled(featuremgmt.FlagPrometheusWideSeries),
enableDataplane: features.IsEnabled(featuremgmt.FlagPrometheusDataplane),
exemplarSampler: exemplarSampler, exemplarSampler: exemplarSampler,
}, nil }, nil
} }

View File

@ -28,6 +28,7 @@ func (s *QueryData) parseResponse(ctx context.Context, q *models.Query, res *htt
r := converter.ReadPrometheusStyleResult(iter, converter.Options{ r := converter.ReadPrometheusStyleResult(iter, converter.Options{
MatrixWideSeries: s.enableWideSeries, MatrixWideSeries: s.enableWideSeries,
VectorWideSeries: s.enableWideSeries, VectorWideSeries: s.enableWideSeries,
Dataplane: s.enableDataplane,
}) })
// Add frame to attach metadata // Add frame to attach metadata

View File

@ -21,6 +21,7 @@ func logf(format string, a ...interface{}) {
type Options struct { type Options struct {
MatrixWideSeries bool MatrixWideSeries bool
VectorWideSeries bool VectorWideSeries bool
Dataplane bool
} }
// ReadPrometheusStyleResult will read results from a prometheus or loki server and return data frames // ReadPrometheusStyleResult will read results from a prometheus or loki server and return data frames
@ -115,15 +116,15 @@ func readPrometheusData(iter *jsoniter.Iterator, opt Options) backend.DataRespon
switch resultType { switch resultType {
case "matrix": case "matrix":
if opt.MatrixWideSeries { if opt.MatrixWideSeries {
rsp = readMatrixOrVectorWide(iter, resultType) rsp = readMatrixOrVectorWide(iter, resultType, opt)
} else { } else {
rsp = readMatrixOrVectorMulti(iter, resultType) rsp = readMatrixOrVectorMulti(iter, resultType, opt)
} }
case "vector": case "vector":
if opt.VectorWideSeries { if opt.VectorWideSeries {
rsp = readMatrixOrVectorWide(iter, resultType) rsp = readMatrixOrVectorWide(iter, resultType, opt)
} else { } else {
rsp = readMatrixOrVectorMulti(iter, resultType) rsp = readMatrixOrVectorMulti(iter, resultType, opt)
} }
case "streams": case "streams":
rsp = readStream(iter) rsp = readStream(iter)
@ -355,7 +356,7 @@ func readScalar(iter *jsoniter.Iterator) backend.DataResponse {
frame := data.NewFrame("", timeField, valueField) frame := data.NewFrame("", timeField, valueField)
frame.Meta = &data.FrameMeta{ frame.Meta = &data.FrameMeta{
Type: data.FrameTypeTimeSeriesMulti, Type: data.FrameTypeNumericMulti,
Custom: resultTypeToCustomMeta("scalar"), Custom: resultTypeToCustomMeta("scalar"),
} }
@ -364,16 +365,25 @@ func readScalar(iter *jsoniter.Iterator) backend.DataResponse {
} }
} }
func readMatrixOrVectorWide(iter *jsoniter.Iterator, resultType string) backend.DataResponse { func readMatrixOrVectorWide(iter *jsoniter.Iterator, resultType string, opt Options) backend.DataResponse {
rowIdx := 0 rowIdx := 0
timeMap := map[int64]int{} timeMap := map[int64]int{}
timeField := data.NewFieldFromFieldType(data.FieldTypeTime, 0) timeField := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
timeField.Name = data.TimeSeriesTimeFieldName timeField.Name = data.TimeSeriesTimeFieldName
frame := data.NewFrame("", timeField) frame := data.NewFrame("", timeField)
frame.Meta = &data.FrameMeta{
frame.Meta = &data.FrameMeta{ // Overwritten if histogram
Type: data.FrameTypeTimeSeriesWide, Type: data.FrameTypeTimeSeriesWide,
Custom: resultTypeToCustomMeta(resultType), Custom: resultTypeToCustomMeta(resultType),
} }
if opt.Dataplane && resultType == "vector" {
frame.Meta.Type = data.FrameTypeNumericWide
}
if opt.Dataplane {
frame.Meta.TypeVersion = data.FrameTypeVersion{0, 1}
}
rsp := backend.DataResponse{ rsp := backend.DataResponse{
Frames: []*data.Frame{}, Frames: []*data.Frame{},
} }
@ -390,6 +400,11 @@ func readMatrixOrVectorWide(iter *jsoniter.Iterator, resultType string) backend.
switch l1Field { switch l1Field {
case "metric": case "metric":
iter.ReadVal(&valueField.Labels) iter.ReadVal(&valueField.Labels)
if opt.Dataplane {
if n, ok := valueField.Labels["__name__"]; ok {
valueField.Name = n
}
}
case "value": case "value":
timeMap, rowIdx = addValuePairToFrame(frame, timeMap, rowIdx, iter) timeMap, rowIdx = addValuePairToFrame(frame, timeMap, rowIdx, iter)
@ -472,7 +487,7 @@ func addValuePairToFrame(frame *data.Frame, timeMap map[int64]int, rowIdx int, i
return timeMap, rowIdx return timeMap, rowIdx
} }
func readMatrixOrVectorMulti(iter *jsoniter.Iterator, resultType string) backend.DataResponse { func readMatrixOrVectorMulti(iter *jsoniter.Iterator, resultType string, opt Options) backend.DataResponse {
rsp := backend.DataResponse{} rsp := backend.DataResponse{}
for iter.ReadArray() { for iter.ReadArray() {
@ -488,6 +503,11 @@ func readMatrixOrVectorMulti(iter *jsoniter.Iterator, resultType string) backend
switch l1Field { switch l1Field {
case "metric": case "metric":
iter.ReadVal(&valueField.Labels) iter.ReadVal(&valueField.Labels)
if opt.Dataplane {
if n, ok := valueField.Labels["__name__"]; ok {
valueField.Name = n
}
}
case "value": case "value":
t, v, err := readTimeValuePair(iter) t, v, err := readTimeValuePair(iter)
@ -548,6 +568,12 @@ func readMatrixOrVectorMulti(iter *jsoniter.Iterator, resultType string) backend
Type: data.FrameTypeTimeSeriesMulti, Type: data.FrameTypeTimeSeriesMulti,
Custom: resultTypeToCustomMeta(resultType), Custom: resultTypeToCustomMeta(resultType),
} }
if opt.Dataplane && resultType == "vector" {
frame.Meta.Type = data.FrameTypeNumericMulti
}
if opt.Dataplane {
frame.Meta.TypeVersion = data.FrameTypeVersion{0, 1}
}
rsp.Frames = append(rsp.Frames, frame) rsp.Frames = append(rsp.Frames, frame)
} }
} }