CloudWatch Logs: Support Log Anomalies query type (#113067)

This commit is contained in:
Ida Štambuk 2025-10-29 18:47:33 +01:00 committed by GitHub
parent de88abafdd
commit 30bd4e7dba
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 1080 additions and 35 deletions

View File

@ -218,6 +218,11 @@ export interface QueryEditorArrayExpression {
export type QueryEditorExpression = (QueryEditorArrayExpression | QueryEditorPropertyExpression | QueryEditorGroupByExpression | QueryEditorFunctionExpression | QueryEditorFunctionParameterExpression | QueryEditorOperatorExpression);
/** Whether a Logs query is a Logs Insights or Logs Anomalies query. */
export enum LogsMode {
  Anomalies = 'Anomalies',
  Insights = 'Insights',
}
export enum LogsQueryLanguage {
CWLI = 'CWLI',
PPL = 'PPL',
@ -241,6 +246,10 @@ export interface CloudWatchLogsQuery extends common.DataQuery {
* Log groups to query
*/
logGroups?: Array<LogGroup>;
/**
* Whether a query is a Logs Insights or Logs Anomalies query
*/
logsMode?: LogsMode;
/**
* Language used for querying logs, can be CWLI, SQL, or PPL. If empty, the default language is CWLI.
*/
@ -265,6 +274,33 @@ export const defaultCloudWatchLogsQuery: Partial<CloudWatchLogsQuery> = {
statsGroups: [],
};
/**
 * Shape of a CloudWatch Logs Anomalies query.
 *
 * NOTE(review): this interface appears auto-generated (dataquery.gen) —
 * presumably from the CUE schema; confirm edits are made in the schema.
 */
export interface CloudWatchLogsAnomaliesQuery extends common.DataQuery {
  /**
   * Used to filter only the anomalies found by a certain anomaly detector
   */
  anomalyDetectionARN?: string;
  id: string;
  /**
   * Whether a query is a Logs Insights or Logs Anomalies query
   */
  logsMode?: LogsMode;
  /**
   * Whether a query is a Metrics, Logs or Annotations query
   */
  queryMode?: CloudWatchQueryMode;
  /**
   * AWS region to query for the logs
   */
  region: string;
  /**
   * Filter to return only anomalies that are 'SUPPRESSED', 'UNSUPPRESSED', or 'ALL' (default)
   */
  suppressionState?: string;
}
export interface LogGroup {
/**
* AccountId of the log group

View File

@ -36,7 +36,7 @@ const (
headerFromAlert = "FromAlert"
defaultRegion = "default"
logsQueryMode = "Logs"
queryModeLogs = "Logs"
// QueryTypes
annotationQuery = "annotationQuery"
logAction = "logAction"
@ -45,7 +45,8 @@ const (
type DataQueryJson struct {
dataquery.CloudWatchAnnotationQuery
Type string `json:"type,omitempty"`
Type string `json:"type,omitempty"`
LogsMode dataquery.LogsMode `json:"logsMode,omitempty"`
}
type DataSource struct {
@ -147,12 +148,22 @@ func (ds *DataSource) QueryData(ctx context.Context, req *backend.QueryDataReque
if model.QueryMode != "" {
queryMode = string(model.QueryMode)
}
fromPublicDashboard := model.Type == "" && queryMode == logsQueryMode
isSyncLogQuery := ((fromAlert || fromExpression) && queryMode == logsQueryMode) || fromPublicDashboard
fromPublicDashboard := model.Type == ""
isLogInsightsQuery := queryMode == queryModeLogs && (model.LogsMode == "" || model.LogsMode == dataquery.LogsModeInsights)
isSyncLogQuery := isLogInsightsQuery && ((fromAlert || fromExpression) || fromPublicDashboard)
if isSyncLogQuery {
return executeSyncLogQuery(ctx, ds, req)
}
isLogsAnomaliesQuery := model.QueryMode == dataquery.CloudWatchQueryModeLogs && model.LogsMode == dataquery.LogsModeAnomalies
if isLogsAnomaliesQuery {
return executeLogAnomaliesQuery(ctx, ds, req)
}
var result *backend.QueryDataResponse
switch model.Type {
case annotationQuery:

View File

@ -285,6 +285,13 @@ func NewQueryEditorOperatorValueType() *QueryEditorOperatorValueType {
return NewStringOrBoolOrInt64OrArrayOfQueryEditorOperatorType()
}
// LogsMode discriminates between Logs Insights and Logs Anomalies queries.
type LogsMode string

const (
	LogsModeInsights  LogsMode = "Insights"
	LogsModeAnomalies LogsMode = "Anomalies"
)
type LogsQueryLanguage string
const (
@ -297,7 +304,9 @@ const (
type CloudWatchLogsQuery struct {
// Whether a query is a Metrics, Logs, or Annotations query
QueryMode CloudWatchQueryMode `json:"queryMode"`
Id string `json:"id"`
// Whether a query is a Logs Insights or Logs Anomalies query
LogsMode *LogsMode `json:"logsMode,omitempty"`
Id string `json:"id"`
// AWS region to query for the logs
Region string `json:"region"`
// The CloudWatch Logs Insights query to execute
@ -347,6 +356,40 @@ func NewLogGroup() *LogGroup {
return &LogGroup{}
}
// Shape of a CloudWatch Logs Anomalies query.
// NOTE(review): presumably generated from the dataquery CUE schema — confirm
// before editing by hand.
type CloudWatchLogsAnomaliesQuery struct {
	Id string `json:"id"`
	// AWS region to query for the logs
	Region string `json:"region"`
	// Whether a query is a Metrics, Logs or Annotations query
	QueryMode *CloudWatchQueryMode `json:"queryMode,omitempty"`
	// Whether a query is a Logs Insights or Logs Anomalies query
	LogsMode *LogsMode `json:"logsMode,omitempty"`
	// Filter to return only anomalies that are 'SUPPRESSED', 'UNSUPPRESSED', or 'ALL' (default)
	SuppressionState *string `json:"suppressionState,omitempty"`
	// A unique identifier for the query within the list of targets.
	// In server side expressions, the refId is used as a variable name to identify results.
	// By default, the UI will assign A->Z; however setting meaningful names may be useful.
	RefId string `json:"refId"`
	// If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel.
	Hide *bool `json:"hide,omitempty"`
	// Specify the query flavor
	// TODO make this required and give it a default
	QueryType *string `json:"queryType,omitempty"`
	// Used to filter only the anomalies found by a certain anomaly detector
	AnomalyDetectionARN *string `json:"anomalyDetectionARN,omitempty"`
	// For mixed data sources the selected datasource is on the query level.
	// For non mixed scenarios this is undefined.
	// TODO find a better way to do this ^ that's friendly to schema
	// TODO this shouldn't be unknown but DataSourceRef | null
	Datasource any `json:"datasource,omitempty"`
}

// NewCloudWatchLogsAnomaliesQuery creates a new CloudWatchLogsAnomaliesQuery object.
func NewCloudWatchLogsAnomaliesQuery() *CloudWatchLogsAnomaliesQuery {
	return &CloudWatchLogsAnomaliesQuery{}
}
// Shape of a CloudWatch Annotation query
// TS type is CloudWatchDefaultQuery = Omit<CloudWatchLogsQuery, 'queryMode'> & CloudWatchMetricsQuery, declared in veneer
// #CloudWatchDefaultQuery: #CloudWatchLogsQuery & #CloudWatchMetricsQuery @cuetsy(kind="type")

View File

@ -0,0 +1,146 @@
package cloudwatch
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs"
cloudwatchLogsTypes "github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs/types"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/tsdb/cloudwatch/kinds/dataquery"
)
// executeLogAnomaliesQuery handles queries with queryMode "Logs" and logsMode
// "Anomalies": for each query it calls cloudwatch:ListAnomalies in the query's
// region and converts the result into a single table frame.
//
// Declared as a package-level var so tests can stub it out.
var executeLogAnomaliesQuery = func(ctx context.Context, ds *DataSource, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()
	for _, q := range req.Queries {
		var anomaliesQuery dataquery.CloudWatchLogsAnomaliesQuery
		err := json.Unmarshal(q.JSON, &anomaliesQuery)
		if err != nil {
			// NOTE(review): a malformed query is silently skipped, leaving no
			// response for this RefID; consider surfacing a parse error.
			continue
		}
		// Fall back to the data source's configured region when the query has
		// none (or the "default" placeholder).
		region := anomaliesQuery.Region
		if region == "" || region == defaultRegion {
			region = ds.Settings.Region
			anomaliesQuery.Region = region
		}
		// BUG FIX: build the client for the *resolved* region. The original
		// passed the raw query region, so ""/"default" queries asked for a
		// client in an empty region instead of ds.Settings.Region.
		logsClient, err := ds.getCWLogsClient(ctx, region)
		if err != nil {
			return nil, err
		}
		listAnomaliesInput := &cloudwatchlogs.ListAnomaliesInput{}
		if anomaliesQuery.SuppressionState != nil {
			listAnomaliesInput.SuppressionState = getSuppressionState(*anomaliesQuery.SuppressionState)
		}
		// Only filter by detector when an ARN was actually provided. (The
		// original condition `== nil || *ARN != ""` assigned nil when the
		// field was absent — a no-op, but written backwards.)
		if anomaliesQuery.AnomalyDetectionARN != nil && *anomaliesQuery.AnomalyDetectionARN != "" {
			listAnomaliesInput.AnomalyDetectorArn = anomaliesQuery.AnomalyDetectionARN
		}
		response, err := logsClient.ListAnomalies(ctx, listAnomaliesInput)
		if err != nil {
			// Record the downstream error on the shared response so results
			// already collected for earlier queries are not discarded (the
			// original returned a brand-new response containing only the error).
			resp.Responses[q.RefID] = backend.ErrorResponseWithErrorSource(backend.DownstreamError(fmt.Errorf("%v: %w", "failed to call cloudwatch:ListAnomalies", err)))
			return resp, nil
		}
		dataframe, err := logsAnomaliesResultsToDataframes(response)
		if err != nil {
			return nil, err
		}
		respD := resp.Responses[q.RefID]
		respD.Frames = data.Frames{dataframe}
		resp.Responses[q.RefID] = respD
	}
	return resp, nil
}
// logsAnomaliesResultsToDataframes converts a ListAnomalies response into a
// single table frame with one row per anomaly. An empty anomaly list yields
// an empty frame with no fields.
func logsAnomaliesResultsToDataframes(response *cloudwatchlogs.ListAnomaliesOutput) (*data.Frame, error) {
	frame := data.NewFrame("Log anomalies")
	if len(response.Anomalies) == 0 {
		return frame, nil
	}
	n := len(response.Anomalies)
	anomalyArns := make([]string, n)
	descriptions := make([]string, n)
	suppressedStatus := make([]bool, n)
	priorities := make([]string, n)
	patterns := make([]string, n)
	statuses := make([]string, n)
	logGroupArnLists := make([]string, n)
	firstSeens := make([]time.Time, n)
	lastSeens := make([]time.Time, n)
	logTrends := make([]*json.RawMessage, n)
	for i, anomaly := range response.Anomalies {
		// BUG FIX: the SDK models these fields as pointers; the original
		// dereferenced them unconditionally and would panic on a nil value.
		// Missing values are left as the zero value for their column.
		if anomaly.AnomalyDetectorArn != nil {
			anomalyArns[i] = *anomaly.AnomalyDetectorArn
		}
		if anomaly.Description != nil {
			descriptions[i] = *anomaly.Description
		}
		if anomaly.Suppressed != nil {
			suppressedStatus[i] = *anomaly.Suppressed
		}
		if anomaly.Priority != nil {
			priorities[i] = *anomaly.Priority
		}
		if anomaly.PatternString != nil {
			patterns[i] = *anomaly.PatternString
		}
		statuses[i] = string(anomaly.State)
		logGroupArnLists[i] = strings.Join(anomaly.LogGroupArnList, ",")
		firstSeens[i] = time.UnixMilli(anomaly.FirstSeen)
		lastSeens[i] = time.UnixMilli(anomaly.LastSeen)
		// data.Frame returned from the backend cannot contain fields of type data.Frames
		// so the histogram is kept as json.RawMessage to be built as a sparkline table cell on the FE
		histogramJSON, err := json.Marshal(anomaly.Histogram)
		if err != nil {
			logTrends[i] = nil
		} else {
			rawMsg := json.RawMessage(histogramJSON)
			logTrends[i] = &rawMsg
		}
	}
	// 10 fields are appended below; size the capacity accordingly (the
	// original used the anomaly count, i.e. the row count, by mistake).
	newFields := make([]*data.Field, 0, 10)
	newFields = append(newFields, data.NewField("state", nil, statuses).SetConfig(&data.FieldConfig{DisplayName: "State"}))
	newFields = append(newFields, data.NewField("description", nil, descriptions).SetConfig(&data.FieldConfig{DisplayName: "Anomaly"}))
	newFields = append(newFields, data.NewField("priority", nil, priorities).SetConfig(&data.FieldConfig{DisplayName: "Priority"}))
	newFields = append(newFields, data.NewField("patternString", nil, patterns).SetConfig(&data.FieldConfig{DisplayName: "Log Pattern"}))
	// FE expects the field name to be logTrend in order to identify histogram field for sparkline rendering
	newFields = append(newFields, data.NewField("logTrend", nil, logTrends).SetConfig(&data.FieldConfig{DisplayName: "Log Trend"}))
	newFields = append(newFields, data.NewField("firstSeen", nil, firstSeens).SetConfig(&data.FieldConfig{DisplayName: "First seen"}))
	newFields = append(newFields, data.NewField("lastSeen", nil, lastSeens).SetConfig(&data.FieldConfig{DisplayName: "Last seen"}))
	newFields = append(newFields, data.NewField("suppressed", nil, suppressedStatus).SetConfig(&data.FieldConfig{DisplayName: "Suppressed?"}))
	newFields = append(newFields, data.NewField("logGroupArnList", nil, logGroupArnLists).SetConfig(&data.FieldConfig{DisplayName: "Log Groups"}))
	newFields = append(newFields, data.NewField("anomalyArn", nil, anomalyArns).SetConfig(&data.FieldConfig{DisplayName: "Anomaly Arn"}))
	frame.Fields = newFields
	setPreferredVisType(frame, data.VisTypeTable)
	return frame, nil
}
// getSuppressionState maps the lowercase suppression-state string sent by the
// frontend ("suppressed" / "unsuppressed") to the corresponding AWS SDK value.
// "all", "" and any unrecognized input map to the empty state, which tells
// ListAnomalies not to filter by suppression.
func getSuppressionState(suppressionState string) cloudwatchLogsTypes.SuppressionState {
	known := map[string]cloudwatchLogsTypes.SuppressionState{
		"suppressed":   cloudwatchLogsTypes.SuppressionStateSuppressed,
		"unsuppressed": cloudwatchLogsTypes.SuppressionStateUnsuppressed,
	}
	// Missing keys yield the zero value "", matching the original switch's
	// "all"/default branches.
	return known[suppressionState]
}

View File

@ -0,0 +1,212 @@
package cloudwatch
import (
"context"
"encoding/json"
"testing"
"time"
"github.com/aws/aws-sdk-go-v2/aws"
cloudwatchLogsTypes "github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs/types"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/cloudwatch/models"
"github.com/stretchr/testify/assert"
)
// Test_executeLogAnomaliesQuery verifies that the suppression-state string on
// the query is translated into the correct SuppressionState on the
// ListAnomalies input.
func Test_executeLogAnomaliesQuery(t *testing.T) {
	// BUG FIX: the test overrides NewCWLogsClient but the original only saved
	// and restored NewCWClient, leaking the fake logs client into any test
	// that runs afterwards. Restore both hooks.
	origNewCWClient := NewCWClient
	origNewCWLogsClient := NewCWLogsClient
	t.Cleanup(func() {
		NewCWClient = origNewCWClient
		NewCWLogsClient = origNewCWLogsClient
	})
	var cli fakeCWLogsClient
	NewCWLogsClient = func(aws.Config) models.CWLogsClient {
		return &cli
	}
	t.Run("getCWLogsClient is called with correct suppression state", func(t *testing.T) {
		testcases := []struct {
			name                    string
			suppressionStateInQuery string
			result                  cloudwatchLogsTypes.SuppressionState
		}{
			{
				"suppressed state",
				"suppressed",
				cloudwatchLogsTypes.SuppressionStateSuppressed,
			},
			{
				"unsuppressed state",
				"unsuppressed",
				cloudwatchLogsTypes.SuppressionStateUnsuppressed,
			},
			{
				"empty state",
				"",
				"",
			},
			{
				"all state",
				"all",
				"",
			},
		}
		for _, tc := range testcases {
			t.Run(tc.name, func(t *testing.T) {
				// Reset the fake per case so listAnomalies[0] is this case's call.
				cli = fakeCWLogsClient{anomalies: []cloudwatchLogsTypes.Anomaly{}}
				ds := newTestDatasource()
				_, err := ds.QueryData(context.Background(), &backend.QueryDataRequest{
					Headers:       map[string]string{headerFromAlert: ""},
					PluginContext: backend.PluginContext{DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{}},
					Queries: []backend.DataQuery{
						{
							TimeRange: backend.TimeRange{From: time.Unix(0, 0), To: time.Unix(1, 0)},
							JSON: json.RawMessage(`{
								"queryMode": "Logs",
								"logsMode": "Anomalies",
								"suppressionState": "` + tc.suppressionStateInQuery + `",
								"region": "us-east-1"
							}`),
						},
					},
				})
				assert.NoError(t, err)
				assert.Equal(t, tc.result, cli.calls.listAnomalies[0].SuppressionState)
			})
		}
	})
}
// Test_executeLogAnomaliesQuery_returns_data_frames verifies the full shape of
// the table frame built from a ListAnomalies response: field order, names,
// display values, timestamps and the JSON-encoded histogram column.
func Test_executeLogAnomaliesQuery_returns_data_frames(t *testing.T) {
	// BUG FIX: restore both factory hooks — the original only restored
	// NewCWClient while overriding NewCWLogsClient, leaking the fake client
	// into subsequent tests.
	origNewCWClient := NewCWClient
	origNewCWLogsClient := NewCWLogsClient
	t.Cleanup(func() {
		NewCWClient = origNewCWClient
		NewCWLogsClient = origNewCWLogsClient
	})
	var cli fakeCWLogsClient
	NewCWLogsClient = func(aws.Config) models.CWLogsClient {
		return &cli
	}
	t.Run("returns log anomalies data frames", func(t *testing.T) {
		cli = fakeCWLogsClient{anomalies: []cloudwatchLogsTypes.Anomaly{
			{
				AnomalyId:          aws.String("anomaly-1"),
				AnomalyDetectorArn: aws.String("arn:aws:logs:us-east-1:123456789012:anomaly-detector:anomaly-detector-1"),
				FirstSeen:          1622505600000, // June 1, 2021 00:00:00 GMT
				LastSeen:           1622592000000, // June 2, 2021 00:00:00 GMT
				LogGroupArnList:    []string{"arn:aws:logs:us-east-1:1234567:log-group-1:id-1", "arn:aws:logs:us-east-1:1234567:log-group-2:id-2"},
				Description:        aws.String("Description 1"),
				State:              cloudwatchLogsTypes.StateActive,
				Priority:           aws.String("high"),
				PatternString:      aws.String(`{"ClusterName":"PetSite","Namespace":"default","Service":"service-petsite",,"instance":"instance"-5:Token-6,"job":"kubernetes-service-endpoints","pod_name":"pod_name"-9,"prom_metric_type":"counter"}`),
				Suppressed:         aws.Bool(false),
				Histogram: map[string]int64{
					"1622505600000": 5,
					"1622519200000": 10,
					"1622532800000": 7,
				},
			},
			{
				AnomalyId:          aws.String("anomaly-2"),
				AnomalyDetectorArn: aws.String("arn:aws:logs:us-east-1:123456789012:anomaly-detector:anomaly-detector-2"),
				FirstSeen:          1622592000000, // June 2, 2021 00:00:00 GMT
				LastSeen:           1622678400000, // June 3, 2021 00:00:00 GMT
				LogGroupArnList:    []string{"arn:aws:logs:us-east-1:1234567:log-group-1:id-3", "arn:aws:logs:us-east-1:1234567:log-group-2:id-4"},
				Description:        aws.String("Description 2"),
				State:              cloudwatchLogsTypes.StateSuppressed,
				Priority:           aws.String("low"),
				PatternString:      aws.String(`{"ClusterName":"PetSite","Namespace":"default","Service":"service-petsite","dotnet_collection_count_total":"dotnet_collection_count_total"-3}`),
				Suppressed:         aws.Bool(true),
				Histogram: map[string]int64{
					"1622592000000": 3,
				},
			},
		}}
		ds := newTestDatasource()
		resp, err := ds.QueryData(context.Background(), &backend.QueryDataRequest{
			Headers:       map[string]string{headerFromAlert: ""},
			PluginContext: backend.PluginContext{DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{}},
			Queries: []backend.DataQuery{
				{
					TimeRange: backend.TimeRange{From: time.Unix(0, 0), To: time.Unix(1, 0)},
					JSON: json.RawMessage(`{
						"queryMode": "Logs",
						"logsMode": "Anomalies",
						"suppressionState": "all",
						"region": "us-east-1"
					}`),
				},
			},
		})
		assert.NoError(t, err)
		assert.Len(t, resp.Responses, 1)
		for _, r := range resp.Responses {
			assert.Len(t, r.Frames, 1)
			frame := r.Frames[0]
			assert.Equal(t, "Log anomalies", frame.Name)
			assert.Len(t, frame.Fields, 10)
			stateField := frame.Fields[0]
			assert.Equal(t, "state", stateField.Name)
			assert.Equal(t, "Active", stateField.At(0))
			assert.Equal(t, "Suppressed", stateField.At(1))
			descriptionField := frame.Fields[1]
			assert.Equal(t, "description", descriptionField.Name)
			assert.Equal(t, "Description 1", descriptionField.At(0))
			assert.Equal(t, "Description 2", descriptionField.At(1))
			priorityField := frame.Fields[2]
			assert.Equal(t, "priority", priorityField.Name)
			assert.Equal(t, "high", priorityField.At(0))
			assert.Equal(t, "low", priorityField.At(1))
			patternStringField := frame.Fields[3]
			assert.Equal(t, "patternString", patternStringField.Name)
			assert.Equal(t, `{"ClusterName":"PetSite","Namespace":"default","Service":"service-petsite",,"instance":"instance"-5:Token-6,"job":"kubernetes-service-endpoints","pod_name":"pod_name"-9,"prom_metric_type":"counter"}`, patternStringField.At(0))
			assert.Equal(t, `{"ClusterName":"PetSite","Namespace":"default","Service":"service-petsite","dotnet_collection_count_total":"dotnet_collection_count_total"-3}`, patternStringField.At(1))
			histogramField := frame.Fields[4]
			assert.Equal(t, "logTrend", histogramField.Name)
			histogram0 := histogramField.At(0).(*json.RawMessage)
			var histData0 map[string]int64
			err = json.Unmarshal(*histogram0, &histData0)
			assert.NoError(t, err)
			assert.Equal(t, int64(5), histData0["1622505600000"])
			assert.Equal(t, int64(10), histData0["1622519200000"])
			assert.Equal(t, int64(7), histData0["1622532800000"])
			firstSeenField := frame.Fields[5]
			assert.Equal(t, "firstSeen", firstSeenField.Name)
			assert.Equal(t, time.Unix(1622505600, 0), firstSeenField.At(0))
			assert.Equal(t, time.Unix(1622592000, 0), firstSeenField.At(1))
			lastSeenField := frame.Fields[6]
			assert.Equal(t, "lastSeen", lastSeenField.Name)
			assert.Equal(t, time.Unix(1622592000, 0), lastSeenField.At(0))
			assert.Equal(t, time.Unix(1622678400, 0), lastSeenField.At(1))
			suppressedField := frame.Fields[7]
			assert.Equal(t, "suppressed", suppressedField.Name)
			assert.Equal(t, false, suppressedField.At(0))
			assert.Equal(t, true, suppressedField.At(1))
			logGroupArnListField := frame.Fields[8]
			assert.Equal(t, "logGroupArnList", logGroupArnListField.Name)
			assert.Equal(t, "arn:aws:logs:us-east-1:1234567:log-group-1:id-1,arn:aws:logs:us-east-1:1234567:log-group-2:id-2", logGroupArnListField.At(0))
			assert.Equal(t, "arn:aws:logs:us-east-1:1234567:log-group-1:id-3,arn:aws:logs:us-east-1:1234567:log-group-2:id-4", logGroupArnListField.At(1))
			anomalyDetectorArnField := frame.Fields[9]
			assert.Equal(t, "anomalyArn", anomalyDetectorArnField.Name)
			assert.Equal(t, "arn:aws:logs:us-east-1:123456789012:anomaly-detector:anomaly-detector-1", anomalyDetectorArnField.At(0))
			assert.Equal(t, "arn:aws:logs:us-east-1:123456789012:anomaly-detector:anomaly-detector-2", anomalyDetectorArnField.At(1))
		}
	})
}

View File

@ -137,7 +137,7 @@ func Test_executeSyncLogQuery(t *testing.T) {
executeSyncLogQuery = origExecuteSyncLogQuery
})
t.Run("when query mode is 'Logs' and does not include type or subtype", func(t *testing.T) {
t.Run("when query mode is 'Logs Insights' and does not include type or subtype", func(t *testing.T) {
origExecuteSyncLogQuery := executeSyncLogQuery
syncCalled := false
executeSyncLogQuery = func(ctx context.Context, e *DataSource, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {

View File

@ -66,3 +66,9 @@ func (m *MockLogEvents) GetLogEvents(ctx context.Context, input *cloudwatchlogs.
return args.Get(0).(*cloudwatchlogs.GetLogEventsOutput), args.Error(1)
}
// ListAnomalies satisfies the ListAnomalies API on the mock: it records the
// call and returns whatever output/error the testify expectations configure.
func (m *MockLogEvents) ListAnomalies(ctx context.Context, input *cloudwatchlogs.ListAnomaliesInput, optFns ...func(*cloudwatchlogs.Options)) (*cloudwatchlogs.ListAnomaliesOutput, error) {
	args := m.Called(ctx, input, optFns)
	return args.Get(0).(*cloudwatchlogs.ListAnomaliesOutput), args.Error(1)
}

View File

@ -76,6 +76,7 @@ type CWLogsClient interface {
cloudwatchlogs.GetLogEventsAPIClient
cloudwatchlogs.DescribeLogGroupsAPIClient
cloudwatchlogs.ListAnomaliesAPIClient
}
type CWClient interface {

View File

@ -29,12 +29,15 @@ type fakeCWLogsClient struct {
queryResults cloudwatchlogs.GetQueryResultsOutput
logGroupsIndex int
anomalies []cloudwatchlogstypes.Anomaly
}
type logsQueryCalls struct {
startQuery []*cloudwatchlogs.StartQueryInput
getEvents []*cloudwatchlogs.GetLogEventsInput
describeLogGroups []*cloudwatchlogs.DescribeLogGroupsInput
listAnomalies []*cloudwatchlogs.ListAnomaliesInput
}
func (m *fakeCWLogsClient) GetQueryResults(_ context.Context, _ *cloudwatchlogs.GetQueryResultsInput, _ ...func(*cloudwatchlogs.Options)) (*cloudwatchlogs.GetQueryResultsOutput, error) {
@ -55,6 +58,14 @@ func (m *fakeCWLogsClient) StopQuery(_ context.Context, _ *cloudwatchlogs.StopQu
}, nil
}
// ListAnomalies records the input for later assertions (via calls.listAnomalies)
// and returns the canned anomalies configured on the fake client.
func (m *fakeCWLogsClient) ListAnomalies(_ context.Context, input *cloudwatchlogs.ListAnomaliesInput, _ ...func(*cloudwatchlogs.Options)) (*cloudwatchlogs.ListAnomaliesOutput, error) {
	m.calls.listAnomalies = append(m.calls.listAnomalies, input)
	return &cloudwatchlogs.ListAnomaliesOutput{
		Anomalies: m.anomalies,
	}, nil
}
type mockLogsSyncClient struct {
mock.Mock
}
@ -80,6 +91,11 @@ func (m *mockLogsSyncClient) StartQuery(ctx context.Context, input *cloudwatchlo
return args.Get(0).(*cloudwatchlogs.StartQueryOutput), args.Error(1)
}
// ListAnomalies satisfies the CWLogsClient interface on the sync-client mock;
// output and error come from the testify expectations.
func (m *mockLogsSyncClient) ListAnomalies(ctx context.Context, input *cloudwatchlogs.ListAnomaliesInput, optFns ...func(*cloudwatchlogs.Options)) (*cloudwatchlogs.ListAnomaliesOutput, error) {
	args := m.Called(ctx, input, optFns)
	return args.Get(0).(*cloudwatchlogs.ListAnomaliesOutput), args.Error(1)
}
func (m *fakeCWLogsClient) DescribeLogGroups(_ context.Context, input *cloudwatchlogs.DescribeLogGroupsInput, _ ...func(*cloudwatchlogs.Options)) (*cloudwatchlogs.DescribeLogGroupsOutput, error) {
m.calls.describeLogGroups = append(m.calls.describeLogGroups, input)
output := &m.logGroups[m.logGroupsIndex]

View File

@ -0,0 +1,41 @@
import { EditorField, EditorRow } from '@grafana/plugin-ui';
import { Combobox, Input } from '@grafana/ui';
import { CloudWatchLogsAnomaliesQuery } from '../../../dataquery.gen';
interface Props {
  query: CloudWatchLogsAnomaliesQuery;
  onChange: (value: CloudWatchLogsAnomaliesQuery) => void;
}

// Options for the ListAnomalies suppression-state filter. The lowercase
// values match what the backend's getSuppressionState expects.
// (Typo fix: variable was previously spelled "supressionStateOptions".)
const suppressionStateOptions = [
  { label: 'All', value: 'all' },
  { label: 'Suppressed', value: 'suppressed' },
  { label: 'Unsuppressed', value: 'unsuppressed' },
];

/**
 * Editor for CloudWatch Logs Anomalies queries: an optional anomaly-detector
 * ARN filter plus a suppression-state filter.
 */
export const LogsAnomaliesQueryEditor = (props: Props) => {
  return (
    <>
      <EditorRow>
        <EditorField label="Anomaly Detection ARN">
          <Input
            value={props.query.anomalyDetectionARN || ''}
            onChange={(e) => {
              props.onChange({ ...props.query, anomalyDetectionARN: e.currentTarget.value });
            }}
          />
        </EditorField>
        {/* Typo fix: label previously read "Supression state" */}
        <EditorField label="Suppression state">
          <Combobox
            value={props.query.suppressionState ?? 'all'}
            options={suppressionStateOptions}
            onChange={(e) => {
              props.onChange({ ...props.query, suppressionState: e.value });
            }}
          />
        </EditorField>
      </EditorRow>
    </>
  );
};

View File

@ -6,9 +6,10 @@ import { InlineSelect } from '@grafana/plugin-ui';
import { CloudWatchDatasource } from '../../../datasource';
import { DEFAULT_CWLI_QUERY_STRING, DEFAULT_PPL_QUERY_STRING, DEFAULT_SQL_QUERY_STRING } from '../../../defaultQueries';
import { CloudWatchJsonData, CloudWatchLogsQuery, CloudWatchQuery, LogsQueryLanguage } from '../../../types';
import { CloudWatchJsonData, CloudWatchLogsQuery, CloudWatchQuery, LogsMode, LogsQueryLanguage } from '../../../types';
import { CloudWatchLink } from './CloudWatchLink';
import { LogsAnomaliesQueryEditor } from './LogsAnomaliesQueryEditor';
import { CloudWatchLogsQueryField } from './LogsQueryField';
type Props = QueryEditorProps<CloudWatchDatasource, CloudWatchQuery, CloudWatchJsonData> & {
@ -22,6 +23,11 @@ const logsQueryLanguageOptions: Array<SelectableValue<LogsQueryLanguage>> = [
{ label: 'OpenSearch PPL', value: LogsQueryLanguage.PPL },
];
const logsModeOptions: Array<SelectableValue<LogsMode>> = [
{ label: 'Logs Insights', value: LogsMode.Insights },
{ label: 'Logs Anomalies', value: LogsMode.Anomalies },
];
export const CloudWatchLogsQueryEditor = memo(function CloudWatchLogsQueryEditor(props: Props) {
const { query, data, datasource, onChange, extraHeaderElementLeft } = props;
@ -42,6 +48,13 @@ export const CloudWatchLogsQueryEditor = memo(function CloudWatchLogsQueryEditor
[isQueryNew, onChange, query]
);
const onLogsModeChange = useCallback(
(logsMode: LogsMode | undefined) => {
onChange({ ...query, logsMode });
},
[query, onChange]
);
// if the query has already been saved from before, we shouldn't replace it with a default one
useEffectOnce(() => {
if (query.expression) {
@ -58,20 +71,32 @@ export const CloudWatchLogsQueryEditor = memo(function CloudWatchLogsQueryEditor
useEffect(() => {
extraHeaderElementLeft?.(
<InlineSelect
label="Query language"
value={query.queryLanguage || LogsQueryLanguage.CWLI}
options={logsQueryLanguageOptions}
onChange={({ value }) => {
onQueryLanguageChange(value);
}}
/>
<>
<InlineSelect
label="Logs Mode"
value={query.logsMode || LogsMode.Insights}
options={logsModeOptions}
onChange={({ value }) => {
onLogsModeChange(value);
}}
/>
{query.logsMode !== LogsMode.Anomalies && (
<InlineSelect
label="Query language"
value={query.queryLanguage || LogsQueryLanguage.CWLI}
options={logsQueryLanguageOptions}
onChange={({ value }) => {
onQueryLanguageChange(value);
}}
/>
)}
</>
);
return () => {
extraHeaderElementLeft?.(undefined);
};
}, [extraHeaderElementLeft, onChange, onQueryLanguageChange, query]);
}, [extraHeaderElementLeft, onChange, onQueryLanguageChange, query, onLogsModeChange]);
const onQueryStringChange = (query: CloudWatchQuery) => {
onChange(query);
@ -79,11 +104,17 @@ export const CloudWatchLogsQueryEditor = memo(function CloudWatchLogsQueryEditor
};
return (
<CloudWatchLogsQueryField
{...props}
onChange={onQueryStringChange}
ExtraFieldElement={<CloudWatchLink query={query} panelData={data} datasource={datasource} />}
/>
<>
{query.logsMode === LogsMode.Anomalies ? (
<LogsAnomaliesQueryEditor query={query} onChange={onChange} />
) : (
<CloudWatchLogsQueryField
{...props}
onChange={onQueryStringChange}
ExtraFieldElement={<CloudWatchLink query={query} panelData={data} datasource={datasource} />}
/>
)}
</>
);
});

View File

@ -146,7 +146,7 @@ composableKinds: DataQuery: {
} @cuetsy(kind="interface")
#QueryEditorExpression: #QueryEditorArrayExpression | #QueryEditorPropertyExpression | #QueryEditorGroupByExpression | #QueryEditorFunctionExpression | #QueryEditorFunctionParameterExpression | #QueryEditorOperatorExpression @cuetsy(kind="type")
#LogsMode: "Insights" | "Anomalies" @cuetsy(kind="enum")
#LogsQueryLanguage: "CWLI" | "SQL" | "PPL" @cuetsy(kind="enum")
// Shape of a CloudWatch Logs query
@ -155,6 +155,8 @@ composableKinds: DataQuery: {
// Whether a query is a Metrics, Logs, or Annotations query
queryMode: #CloudWatchQueryMode
// Whether a query is a Logs Insights or Logs Anomalies query
logsMode?: #LogsMode
id: string
// AWS region to query for the logs
region: string
@ -166,9 +168,27 @@ composableKinds: DataQuery: {
logGroups?: [...#LogGroup]
// @deprecated use logGroups
logGroupNames?: [...string]
// Language used for querying logs, can be CWLI, SQL, or PPL. If empty, the default language is CWLI.
queryLanguage?: #LogsQueryLanguage
} @cuetsy(kind="interface")
// Shape of a CloudWatch Logs Anomalies query
#CloudWatchLogsAnomaliesQuery: {
	common.DataQuery
	id: string
	// AWS region to query for the logs
	region: string
	// Whether a query is a Metrics, Logs or Annotations query
	queryMode?: #CloudWatchQueryMode
	// Whether a query is a Logs Insights or Logs Anomalies query
	logsMode?: #LogsMode
	// Filter to return only anomalies that are 'SUPPRESSED', 'UNSUPPRESSED', or 'ALL' (default)
	suppressionState?: string
	// Used to filter only the anomalies found by a certain anomaly detector
	anomalyDetectionARN?: string
} @cuetsy(kind="interface")
#LogGroup: {
// ARN of the log group
arn: string

View File

@ -216,6 +216,11 @@ export interface QueryEditorArrayExpression {
export type QueryEditorExpression = (QueryEditorArrayExpression | QueryEditorPropertyExpression | QueryEditorGroupByExpression | QueryEditorFunctionExpression | QueryEditorFunctionParameterExpression | QueryEditorOperatorExpression);
/** Whether a Logs query is a Logs Insights or Logs Anomalies query. */
export enum LogsMode {
  Anomalies = 'Anomalies',
  Insights = 'Insights',
}
export enum LogsQueryLanguage {
CWLI = 'CWLI',
PPL = 'PPL',
@ -239,6 +244,10 @@ export interface CloudWatchLogsQuery extends common.DataQuery {
* Log groups to query
*/
logGroups?: Array<LogGroup>;
/**
* Whether a query is a Logs Insights or Logs Anomalies query
*/
logsMode?: LogsMode;
/**
* Language used for querying logs, can be CWLI, SQL, or PPL. If empty, the default language is CWLI.
*/
@ -263,6 +272,33 @@ export const defaultCloudWatchLogsQuery: Partial<CloudWatchLogsQuery> = {
statsGroups: [],
};
/**
 * Shape of a CloudWatch Logs Anomalies query.
 *
 * NOTE(review): this interface appears auto-generated (dataquery.gen) —
 * presumably from the CUE schema; confirm edits are made in the schema.
 */
export interface CloudWatchLogsAnomaliesQuery extends common.DataQuery {
  /**
   * Used to filter only the anomalies found by a certain anomaly detector
   */
  anomalyDetectionARN?: string;
  id: string;
  /**
   * Whether a query is a Logs Insights or Logs Anomalies query
   */
  logsMode?: LogsMode;
  /**
   * Whether a query is a Metrics, Logs or Annotations query
   */
  queryMode?: CloudWatchQueryMode;
  /**
   * AWS region to query for the logs
   */
  region: string;
  /**
   * Filter to return only anomalies that are 'SUPPRESSED', 'UNSUPPRESSED', or 'ALL' (default)
   */
  suppressionState?: string;
}
export interface LogGroup {
/**
* AccountId of the log group

View File

@ -306,7 +306,7 @@ describe('datasource', () => {
});
});
it('should add a data link field to log queries', async () => {
it('should add links to log insights queries', async () => {
const { datasource } = setupForLogs();
const observable = datasource.query({
@ -439,7 +439,7 @@ describe('datasource', () => {
const { datasource } = setupMockedDataSource();
expect(datasource.getDefaultQuery(CoreApp.PanelEditor).queryMode).toEqual('Metrics');
});
it('should set default log groups in default query', () => {
it('should set default log groups in default logs insights query', () => {
const { datasource } = setupMockedDataSource({
customInstanceSettings: {
...CloudWatchSettings,

View File

@ -16,7 +16,12 @@ import { DataSourceWithBackend, TemplateSrv, getTemplateSrv } from '@grafana/run
import { CloudWatchAnnotationSupport } from './annotationSupport';
import { DEFAULT_METRICS_QUERY, getDefaultLogsQuery } from './defaultQueries';
import { isCloudWatchAnnotationQuery, isCloudWatchLogsQuery, isCloudWatchMetricsQuery } from './guards';
import {
isCloudWatchAnnotationQuery,
isCloudWatchLogsQuery,
isCloudWatchMetricsQuery,
isLogsAnomaliesQuery,
} from './guards';
import { CloudWatchLogsLanguageProvider } from './language/cloudwatch-logs/CloudWatchLogsLanguageProvider';
import {
LogsSQLCompletionItemProvider,
@ -40,6 +45,7 @@ import { ResourcesAPI } from './resources/ResourcesAPI';
import {
CloudWatchAnnotationQuery,
CloudWatchJsonData,
CloudWatchLogsAnomaliesQuery,
CloudWatchLogsQuery,
CloudWatchMetricsQuery,
CloudWatchQuery,
@ -104,11 +110,14 @@ export class CloudWatchDatasource
const logQueries: CloudWatchLogsQuery[] = [];
const metricsQueries: CloudWatchMetricsQuery[] = [];
const logsAnomaliesQueries: CloudWatchLogsAnomaliesQuery[] = [];
const annotationQueries: CloudWatchAnnotationQuery[] = [];
queries.forEach((query) => {
if (isCloudWatchAnnotationQuery(query)) {
annotationQueries.push(query);
} else if (isLogsAnomaliesQuery(query)) {
logsAnomaliesQueries.push(query);
} else if (isCloudWatchLogsQuery(query)) {
logQueries.push(query);
} else {
@ -127,6 +136,12 @@ export class CloudWatchDatasource
);
}
if (logsAnomaliesQueries.length) {
dataQueryResponses.push(
this.logsQueryRunner.handleLogAnomaliesQueries(logsAnomaliesQueries, options, super.query.bind(this))
);
}
if (annotationQueries.length) {
dataQueryResponses.push(
this.annotationQueryRunner.handleAnnotationQuery(annotationQueries, options, super.query.bind(this))

View File

@ -1,10 +1,26 @@
import { AnnotationQuery } from '@grafana/data';
import { CloudWatchAnnotationQuery, CloudWatchLogsQuery, CloudWatchMetricsQuery, CloudWatchQuery } from './types';
import {
CloudWatchAnnotationQuery,
CloudWatchLogsAnomaliesQuery,
CloudWatchLogsQuery,
CloudWatchMetricsQuery,
CloudWatchQuery,
LogsMode,
} from './types';
/** Type guard: narrows a CloudWatchQuery to CloudWatchLogsQuery when its queryMode is 'Logs'. */
export const isCloudWatchLogsQuery = (cloudwatchQuery: CloudWatchQuery): cloudwatchQuery is CloudWatchLogsQuery => {
  const { queryMode } = cloudwatchQuery;
  return queryMode === 'Logs';
};
/**
 * Type guard: a query is a Logs Anomalies query when it is a Logs query
 * whose logsMode is set to Anomalies.
 */
export const isLogsAnomaliesQuery = (
  cloudwatchQuery: CloudWatchQuery
): cloudwatchQuery is CloudWatchLogsAnomaliesQuery =>
  isCloudWatchLogsQuery(cloudwatchQuery) && cloudwatchQuery.logsMode === LogsMode.Anomalies;
/**
 * Type guard: narrows a CloudWatchQuery to CloudWatchMetricsQuery.
 * In early versions of this plugin queryMode wasn't defined on a
 * CloudWatchMetricsQuery, so a query with no queryMode at all is
 * also treated as a Metrics query.
 */
export const isCloudWatchMetricsQuery = (
  cloudwatchQuery: CloudWatchQuery
): cloudwatchQuery is CloudWatchMetricsQuery => {
  if (cloudwatchQuery.queryMode === 'Metrics') {
    return true;
  }
  return !Object.prototype.hasOwnProperty.call(cloudwatchQuery, 'queryMode');
};

View File

@ -2,20 +2,26 @@ import { lastValueFrom, of } from 'rxjs';
import {
DataQueryRequest,
DataQueryResponse,
Field,
FieldType,
LogLevel,
LogRowContextQueryDirection,
LogRowModel,
MutableDataFrame,
} from '@grafana/data';
import { regionVariable } from '../mocks/CloudWatchDataSource';
import { setupMockedLogsQueryRunner } from '../mocks/LogsQueryRunner';
import { LogsRequestMock } from '../mocks/Request';
import { validLogsQuery } from '../mocks/queries';
import { CloudWatchLogsQuery } from '../types'; // Add this import statement
import { TimeRangeMock } from '../mocks/timeRange';
import { CloudWatchLogsAnomaliesQuery, CloudWatchLogsQuery, LogsMode } from '../types';
import { LOGSTREAM_IDENTIFIER_INTERNAL, LOG_IDENTIFIER_INTERNAL } from './CloudWatchLogsQueryRunner';
import {
LOGSTREAM_IDENTIFIER_INTERNAL,
LOG_IDENTIFIER_INTERNAL,
convertTrendHistogramToSparkline,
} from './CloudWatchLogsQueryRunner';
describe('CloudWatchLogsQueryRunner', () => {
beforeEach(() => {
@ -28,14 +34,15 @@ describe('CloudWatchLogsQueryRunner', () => {
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame({
dataFrame: {
refId: 'B',
length: 1,
fields: [
{ name: 'ts', type: FieldType.time, values: [1] },
{ name: LOG_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['foo'], labels: {} },
{ name: LOGSTREAM_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['bar'], labels: {} },
{ name: 'ts', type: FieldType.time, values: [1], config: {} },
{ name: LOG_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['foo'], labels: {}, config: {} },
{ name: LOGSTREAM_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['bar'], labels: {}, config: {} },
],
}),
},
entry: '4',
labels: {},
hasAnsi: false,
@ -481,6 +488,120 @@ describe('CloudWatchLogsQueryRunner', () => {
});
});
});
describe('handleLogAnomaliesQueries', () => {
  // Title matches the actual suffix asserted below ('-logsAnomalies', not '-anomalies').
  it('appends -logsAnomalies to the requestId', async () => {
    const { runner, queryMock } = setupMockedLogsQueryRunner();
    const logsAnomaliesRequestMock: DataQueryRequest<CloudWatchLogsAnomaliesQuery> = {
      requestId: 'mockId',
      range: TimeRangeMock,
      rangeRaw: { from: TimeRangeMock.from, to: TimeRangeMock.to },
      targets: [
        {
          id: '1',
          logsMode: LogsMode.Anomalies,
          queryMode: 'Logs',
          refId: 'A',
          region: 'us-east-1',
        },
      ],
      interval: '',
      intervalMs: 0,
      scopedVars: { __interval: { value: '20s' } },
      timezone: '',
      app: '',
      startTime: 0,
    };

    // Pass the anomalies targets built above (not the plain logs-query mocks) so the
    // handler is exercised with the query type it is meant for.
    await expect(
      runner.handleLogAnomaliesQueries(logsAnomaliesRequestMock.targets, logsAnomaliesRequestMock, queryMock)
    ).toEmitValuesWith(() => {
      // the suffix prevents the requestId from colliding with metric/logs queries in the same panel
      expect(queryMock.mock.calls[0][0].requestId).toEqual('mockId-logsAnomalies');
    });
  });

  it('processes log trend histogram data correctly', async () => {
    const response = structuredClone(anomaliesQueryResponse);
    convertTrendHistogramToSparkline(response);
    expect(response.data[0].fields.find((field: Field) => field.name === 'Log trend')).toEqual({
      name: 'Log trend',
      type: 'frame',
      config: {
        custom: {
          drawStyle: 'bars',
          cellOptions: {
            type: 'sparkline',
            hideValue: true,
          },
        },
      },
      values: [
        {
          name: 'Trend_row_0',
          length: 8,
          fields: [
            {
              name: 'time',
              type: 'time',
              values: [
                1760454000000, 1760544000000, 1760724000000, 1761282000000, 1761300000000, 1761354000000,
                1761372000000, 1761390000000,
              ],
              config: {},
            },
            {
              name: 'value',
              type: 'number',
              values: [81, 35, 35, 36, 36, 36, 72, 36],
              config: {},
            },
          ],
        },
        {
          name: 'Trend_row_1',
          length: 2,
          fields: [
            {
              name: 'time',
              type: 'time',
              values: [1760687665000, 1760687670000],
              config: {},
            },
            {
              name: 'value',
              type: 'number',
              values: [3, 3],
              config: {},
            },
          ],
        },
      ],
    });
  });

  it('replaces log trend histogram field at the same index in the frame', () => {
    const response = structuredClone(anomaliesQueryResponse);
    convertTrendHistogramToSparkline(response);
    expect(response.data[0].fields[4].name).toEqual('Log trend');
  });

  it('ignore invalid timestamps in log trend histogram', () => {
    const response = structuredClone(anomaliesQueryResponse);
    // two of the four keys are not parseable as epoch-millis and must be dropped
    response.data[0].fields[4].values[1] = {
      invalidTimestamp: 3,
      '1760687670000': 3,
      anotherInvalidTimestamp: 2,
      '1760687670010': 3,
    };
    convertTrendHistogramToSparkline(response);
    expect(response.data[0].fields[4].values[1].fields[0].values.length).toEqual(2);
    expect(response.data[0].fields[4].values[1].fields[1].values.length).toEqual(2);
  });
});
});
const rawLogQueriesStub: CloudWatchLogsQuery[] = [
@ -641,3 +762,166 @@ const getQueryErrorResponseStub = {
const stopQueryResponseStub = {
state: 'Done',
};
// Mock backend response for a Logs Anomalies query: a single table-oriented frame with
// two anomaly rows. The 'logTrend' field carries the raw Record<timestamp-string, count>
// histograms that convertTrendHistogramToSparkline rewrites into sparkline sub-frames.
const anomaliesQueryResponse: DataQueryResponse = {
  data: [
    {
      name: 'Logs anomalies',
      refId: 'A',
      meta: {
        preferredVisualisationType: 'table',
      },
      fields: [
        {
          name: 'state',
          type: 'string',
          typeInfo: {
            frame: 'string',
          },
          config: {
            displayName: 'State',
          },
          values: ['Active', 'Active'],
          entities: {},
        },
        {
          name: 'description',
          type: 'string',
          typeInfo: {
            frame: 'string',
          },
          config: {
            displayName: 'Anomaly',
          },
          values: [
            '50.0% increase in count of value "405" for "code"-3',
            '151.3% increase in count of value 1 for "dotnet_collection_count_total"-3',
          ],
          entities: {},
        },
        {
          name: 'priority',
          type: 'string',
          typeInfo: {
            frame: 'string',
          },
          config: {
            displayName: 'Priority',
          },
          values: ['MEDIUM', 'MEDIUM'],
          entities: {},
        },
        {
          name: 'patternString',
          type: 'string',
          typeInfo: {
            frame: 'string',
          },
          config: {
            displayName: 'Log Pattern',
          },
          values: [
            '{"ClusterName":"PetSite","Namespace":"default","Service":"service-petsite","Timestamp":<*>,"Version":<*>,"code":<*>,"container_name":"petsite","http_requests_received_total":<*>,"instance":<*>:<*>,"job":"kubernetes-service-endpoints","kubernetes_node":<*>,"method":<*>,"pod_name":<*>,"prom_metric_type":"counter"}',
            '{"ClusterName":"PetSite","Namespace":"default","Service":"service-petsite","Timestamp":<*>,"Version":<*>,"container_name":"petsite","dotnet_collection_count_total":<*>,"generation":<*>,"instance":<*>:<*>,"job":"kubernetes-service-endpoints","kubernetes_node":<*>,"pod_name":<*>,"prom_metric_type":"counter"}',
          ],
          entities: {},
        },
        {
          // raw histogram field as emitted by the CloudWatch API (json.RawMessage on the BE);
          // keys are epoch-millis timestamps as strings, values are occurrence counts
          name: 'logTrend',
          type: 'other',
          typeInfo: {
            frame: 'json.RawMessage',
            nullable: true,
          },
          config: {
            displayName: 'Log Trend',
          },
          values: [
            {
              '1760454000000': 81,
              '1760544000000': 35,
              '1760724000000': 35,
              '1761282000000': 36,
              '1761300000000': 36,
              '1761354000000': 36,
              '1761372000000': 72,
              '1761390000000': 36,
            },
            {
              '1760687665000': 3,
              '1760687670000': 3,
            },
          ],
          entities: {},
        },
        {
          name: 'firstSeen',
          type: 'time',
          typeInfo: {
            frame: 'time.Time',
          },
          config: {
            displayName: 'First seen',
          },
          values: [1760462460000, 1760687640000],
          entities: {},
        },
        {
          name: 'lastSeen',
          type: 'time',
          typeInfo: {
            frame: 'time.Time',
          },
          config: {
            displayName: 'Last seen',
          },
          values: [1761393660000, 1760687940000],
          entities: {},
        },
        {
          name: 'suppressed',
          type: 'boolean',
          typeInfo: {
            frame: 'bool',
          },
          config: {
            displayName: 'Suppressed?',
          },
          values: [false, false],
          entities: {},
        },
        {
          name: 'logGroupArnList',
          type: 'string',
          typeInfo: {
            frame: 'string',
          },
          config: {
            displayName: 'Log Groups',
          },
          values: [
            'arn:aws:logs:us-east-2:569069006612:log-group:/aws/containerinsights/PetSite/prometheus',
            'arn:aws:logs:us-east-2:569069006612:log-group:/aws/containerinsights/PetSite/prometheus',
          ],
          entities: {},
        },
        {
          name: 'anomalyArn',
          type: 'string',
          typeInfo: {
            frame: 'string',
          },
          config: {
            displayName: 'Anomaly Arn',
          },
          values: [
            'arn:aws:logs:us-east-2:569069006612:anomaly-detector:dca8b129-d09d-4167-86e9-7bf62ede2f95',
            'arn:aws:logs:us-east-2:569069006612:anomaly-detector:dca8b129-d09d-4167-86e9-7bf62ede2f95',
          ],
          entities: {},
        },
      ],
      length: 2,
    },
  ],
};

View File

@ -23,6 +23,8 @@ import {
DataQueryRequest,
DataQueryResponse,
DataSourceInstanceSettings,
Field,
FieldType,
LoadingState,
LogRowContextOptions,
LogRowContextQueryDirection,
@ -33,15 +35,19 @@ import {
} from '@grafana/data';
import { TemplateSrv } from '@grafana/runtime';
import { type CustomFormatterVariable } from '@grafana/scenes';
import { GraphDrawStyle } from '@grafana/schema/dist/esm/index';
import { TableCellDisplayMode } from '@grafana/ui';
import {
CloudWatchJsonData,
CloudWatchLogsAnomaliesQuery,
CloudWatchLogsQuery,
CloudWatchLogsQueryStatus,
CloudWatchLogsRequest,
CloudWatchQuery,
GetLogEventsRequest,
LogAction,
LogsMode,
LogsQueryLanguage,
QueryParam,
StartQueryRequest,
@ -101,6 +107,7 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
logGroups,
logGroupNames,
queryLanguage: target.queryLanguage,
logsMode: target.logsMode ?? LogsMode.Insights,
};
});
@ -136,6 +143,63 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
);
};
/**
 * Runs Logs Anomalies queries: builds one StartQueryRequest per target, forwards them
 * to the backend through queryFn, and post-processes the response so the raw log-trend
 * histograms become sparkline-friendly nested data frames.
 */
public handleLogAnomaliesQueries = (
  logAnomaliesQueries: CloudWatchLogsAnomaliesQuery[],
  options: DataQueryRequest<CloudWatchQuery>,
  queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>
): Observable<DataQueryResponse> => {
  // One request per anomalies target; the region may contain template variables.
  // NOTE(review): default suppressionState is lowercase 'all' — the type docs mention
  // 'SUPPRESSED'/'UNSUPPRESSED'/'ALL'; presumably the backend compares case-insensitively.
  const anomalyTargets: StartQueryRequest[] = logAnomaliesQueries.map((target) => ({
    refId: target.refId,
    region: this.templateSrv.replace(this.getActualRegion(target.region)),
    queryString: '',
    logGroups: [],
    logsMode: LogsMode.Anomalies,
    suppressionState: target.suppressionState || 'all',
    anomalyDetectionARN: target.anomalyDetectionARN || '',
  }));

  // append -logsAnomalies to prevent requestId from matching metric or logs queries from the same panel
  const requestId = options?.requestId ? `${options?.requestId}-logsAnomalies` : '';

  const requestParams: DataQueryRequest<CloudWatchLogsAnomaliesQuery> = {
    ...options,
    range: options?.range || getDefaultTimeRange(),
    skipQueryCache: true,
    requestId,
    interval: options?.interval || '', // dummy
    intervalMs: options?.intervalMs || 1, // dummy
    scopedVars: options?.scopedVars || {}, // dummy
    timezone: options?.timezone || '', // dummy
    app: options?.app || '', // dummy
    startTime: options?.startTime || 0, // dummy
    targets: anomalyTargets.map((anomalyTarget) => ({
      ...anomalyTarget,
      id: '',
      queryMode: 'Logs',
      refId: anomalyTarget.refId || 'A',
      intervalMs: 1, // dummy
      maxDataPoints: 1, // dummy
      datasource: this.ref,
      type: 'logAction',
      logsMode: LogsMode.Anomalies,
    })),
  };

  return queryFn(requestParams).pipe(
    // async projection: mergeMap flattens the returned promise back into the stream
    mergeMap(async (dataQueryResponse) => {
      convertTrendHistogramToSparkline(dataQueryResponse);
      return dataQueryResponse;
    })
  );
};
/**
* Called by datasource.ts, invoked when user clicks on a log row in the logs visualization and the "show context button"
*/
@ -450,7 +514,7 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
const hasMissingLogGroups = !query.logGroups?.length;
const hasMissingQueryString = !query.expression?.length;
// log groups are not mandatory if language is SQL
// log groups are not mandatory if language is SQL and LogsMode is not Insights
const isInvalidCWLIQuery = query.queryLanguage !== 'SQL' && hasMissingLogGroups && hasMissingLegacyLogGroupNames;
if (isInvalidCWLIQuery || hasMissingQueryString) {
return false;
@ -479,3 +543,69 @@ function parseLogGroupName(logIdentifier: string): string {
const colonIndex = logIdentifier.lastIndexOf(':');
return logIdentifier.slice(colonIndex + 1);
}
const LOG_TREND_FIELD_NAME = 'logTrend';

/**
 * Takes a DataQueryResponse and converts any "log trend" fields (raw JSON objects of
 * shape Record<timestamp-as-string, count> coming from the CloudWatch API) into nested
 * data-frame fields that the table visualization can render as sparklines.
 *
 * Mutates the response in place; the converted field replaces the raw one at the same
 * index so the column order coming from the backend is preserved.
 */
export function convertTrendHistogramToSparkline(dataQueryResponse: DataQueryResponse): void {
  dataQueryResponse.data.forEach((frame) => {
    let fieldIndexToReplace: number | null = null;
    // log trend histogram field from CW API is of shape Record<timestamp as string, value>
    const sparklineRawData: Field<Record<string, number>> | undefined = frame.fields.find(
      (field: Field, index: number) => {
        if (field.name === LOG_TREND_FIELD_NAME && field.type === FieldType.other) {
          fieldIndexToReplace = index;
          return true;
        }
        return false;
      }
    );
    if (!sparklineRawData) {
      return;
    }

    const sparklineField: Field = {
      name: 'Log trend',
      type: FieldType.frame,
      config: {
        custom: {
          drawStyle: GraphDrawStyle.Bars,
          cellOptions: {
            type: TableCellDisplayMode.Sparkline,
            // hiding the value here as it's not useful or clear on what it represents for log trend
            hideValue: true,
          },
        },
      },
      values: [],
    };

    sparklineRawData.values.forEach((sparklineValue, rowIndex) => {
      const timestamps: number[] = [];
      const values: number[] = [];
      // keys that don't parse as numbers are dropped (invalid timestamps)
      for (const key of Object.keys(sparklineValue)) {
        const timestamp = Number(key);
        if (!isNaN(timestamp)) {
          timestamps.push(timestamp);
          values.push(sparklineValue[key]);
        }
      }
      // NOTE(review): assumes the backend emits histogram keys in chronological order — confirm
      const sparklineFieldFrame: DataFrame = {
        name: `Trend_row_${rowIndex}`,
        length: timestamps.length,
        fields: [
          { name: 'time', type: FieldType.time, values: timestamps, config: {} },
          { name: 'value', type: FieldType.number, values, config: {} },
        ],
      };
      sparklineField.values.push(sparklineFieldFrame);
    });

    // Explicit null check: the raw field may legitimately sit at index 0, and a plain
    // truthiness check (`if (fieldIndexToReplace)`) would silently skip the replacement
    // in that case, leaving the unrenderable raw histogram in the frame.
    if (fieldIndexToReplace !== null) {
      // Make sure sparkline field is placed in the same order as coming from BE
      frame.fields[fieldIndexToReplace] = sparklineField;
    }
  });
}

View File

@ -10,6 +10,7 @@ export type CloudWatchQuery =
| raw.CloudWatchMetricsQuery
| raw.CloudWatchLogsQuery
| raw.CloudWatchAnnotationQuery
| raw.CloudWatchLogsAnomaliesQuery
| CloudWatchDefaultQuery;
// We want to allow setting defaults for both Logs and Metrics queries