2022-03-02 22:41:07 +08:00
package loganalytics
2020-04-27 23:43:02 +08:00
import (
2020-05-12 01:11:03 +08:00
"bytes"
"compress/gzip"
2020-04-27 23:43:02 +08:00
"context"
2023-03-30 00:01:18 +08:00
"encoding/base64"
2020-04-27 23:43:02 +08:00
"encoding/json"
"fmt"
2022-08-10 21:37:51 +08:00
"io"
2020-04-27 23:43:02 +08:00
"net/http"
2023-03-30 00:01:18 +08:00
"net/url"
2020-04-27 23:43:02 +08:00
"path"
2021-05-20 16:16:29 +08:00
"regexp"
2023-04-28 03:24:11 +08:00
"sort"
"strings"
2021-06-07 20:54:51 +08:00
"time"
2020-04-27 23:43:02 +08:00
2021-06-07 20:54:51 +08:00
"github.com/grafana/grafana-plugin-sdk-go/backend"
2024-01-27 06:53:55 +08:00
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
2023-10-05 07:38:26 +08:00
"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
2020-06-06 00:32:10 +08:00
"github.com/grafana/grafana-plugin-sdk-go/data"
2022-04-12 02:20:10 +08:00
"go.opentelemetry.io/otel/attribute"
2023-10-03 20:54:20 +08:00
"go.opentelemetry.io/otel/trace"
2023-04-28 03:24:11 +08:00
"k8s.io/utils/strings/slices"
2022-04-12 02:20:10 +08:00
2023-04-28 03:24:11 +08:00
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
2022-03-02 22:41:07 +08:00
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
2020-04-27 23:43:02 +08:00
)
// AzureLogAnalyticsDatasource calls the Azure Log Analytics API's
type AzureLogAnalyticsDatasource struct {
	// Proxy forwards resource calls to the Azure API (see ResourceRequest).
	Proxy types.ServiceProxy
	// Logger records non-fatal diagnostics, e.g. failures closing response bodies.
	Logger log.Logger
}
2020-04-27 23:43:02 +08:00
// AzureLogAnalyticsQuery is the query request that is built from the saved values for
// from the UI
type AzureLogAnalyticsQuery struct {
	// RefID is the frontend's reference ID for this query; responses are keyed by it.
	RefID string
	// ResultFormat controls how the response is rendered (time series, table, trace, logs).
	ResultFormat dataquery.ResultFormat
	// URL is the relative API path built by getApiURL.
	URL string
	// TraceExploreQuery/TraceParentExploreQuery/TraceLogsExploreQuery are
	// pre-interpolated KQL used for trace data links (traces queries only).
	TraceExploreQuery       string
	TraceParentExploreQuery string
	TraceLogsExploreQuery   string
	// JSON is the raw query model as sent by the frontend.
	JSON json.RawMessage
	// TimeRange is the dashboard time range for the query.
	TimeRange backend.TimeRange
	// Query is the KQL text after macro interpolation.
	Query string
	// Resources lists all resource URIs the query targets.
	Resources []string
	// QueryType distinguishes Log Analytics queries from traces queries.
	QueryType dataquery.AzureQueryType
	// AppInsightsQuery is true when the target is an Application Insights component.
	AppInsightsQuery bool
	// DashboardTime indicates the dashboard time range should scope the query
	// via the request body rather than KQL time filters.
	DashboardTime bool
	// TimeColumn is the column used for dashboard-time scoping.
	TimeColumn string
}
2023-08-30 21:39:31 +08:00
func ( e * AzureLogAnalyticsDatasource ) ResourceRequest ( rw http . ResponseWriter , req * http . Request , cli * http . Client ) ( http . ResponseWriter , error ) {
return e . Proxy . Do ( rw , req , cli )
2021-07-16 18:47:26 +08:00
}
2020-04-27 23:43:02 +08:00
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
2021-06-07 20:54:51 +08:00
// 3. parses the responses for each query into data frames
2023-10-05 07:38:26 +08:00
func ( e * AzureLogAnalyticsDatasource ) ExecuteTimeSeriesQuery ( ctx context . Context , originalQueries [ ] backend . DataQuery , dsInfo types . DatasourceInfo , client * http . Client , url string ) ( * backend . QueryDataResponse , error ) {
2021-06-07 20:54:51 +08:00
result := backend . NewQueryDataResponse ( )
2023-10-05 07:38:26 +08:00
queries , err := e . buildQueries ( ctx , originalQueries , dsInfo )
2020-04-27 23:43:02 +08:00
if err != nil {
2021-06-07 20:54:51 +08:00
return nil , err
2020-04-27 23:43:02 +08:00
}
for _ , query := range queries {
2023-10-05 07:38:26 +08:00
res , err := e . executeQuery ( ctx , query , dsInfo , client , url )
2023-08-30 21:39:31 +08:00
if err != nil {
result . Responses [ query . RefID ] = backend . DataResponse { Error : err }
continue
}
result . Responses [ query . RefID ] = * res
2020-04-27 23:43:02 +08:00
}
return result , nil
}
2023-07-04 17:45:31 +08:00
// getApiURL returns the relative Log Analytics API path for a query target.
// A target beginning with "/subscriptions/" is a resource URI and uses the
// resource-centric API; anything else is treated as a legacy workspace GUID.
func getApiURL(resourceOrWorkspace string, isAppInsightsQuery bool) string {
	// The original used regexp.MatchString("^/subscriptions/", ...) compiled on
	// every call with the error ignored; a prefix check is equivalent and cheaper.
	if !strings.HasPrefix(resourceOrWorkspace, "/subscriptions/") {
		return fmt.Sprintf("v1/workspaces/%s/query", resourceOrWorkspace)
	}
	if isAppInsightsQuery {
		// Application Insights queries address the app by component name,
		// which is the final segment of the resource URI.
		componentName := resourceOrWorkspace[strings.LastIndex(resourceOrWorkspace, "/")+1:]
		return fmt.Sprintf("v1/apps/%s/query", componentName)
	}
	return fmt.Sprintf("v1%s/query", resourceOrWorkspace)
}
2023-10-05 07:38:26 +08:00
// buildQueries converts the raw backend queries into AzureLogAnalyticsQuery
// values: it decodes the per-query JSON model, resolves the target resource or
// workspace, interpolates KQL macros, and (for traces queries) pre-builds the
// explore queries used by data links. Returns an error if any query fails to
// decode or interpolate.
func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureLogAnalyticsQuery, error) {
	azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}
	// Matches resource URIs that point at an Application Insights component.
	appInsightsRegExp, err := regexp.Compile("providers/Microsoft.Insights/components")
	if err != nil {
		return nil, fmt.Errorf("failed to compile Application Insights regex")
	}

	for _, query := range queries {
		resources := []string{}
		var resourceOrWorkspace string
		var queryString string
		var resultFormat dataquery.ResultFormat
		appInsightsQuery := false
		traceExploreQuery := ""
		traceParentExploreQuery := ""
		traceLogsExploreQuery := ""
		dashboardTime := false
		timeColumn := ""

		if query.QueryType == string(dataquery.AzureQueryTypeAzureLogAnalytics) {
			queryJSONModel := types.LogJSONQuery{}
			err := json.Unmarshal(query.JSON, &queryJSONModel)
			if err != nil {
				return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err)
			}
			azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics
			if azureLogAnalyticsTarget.ResultFormat != nil {
				resultFormat = *azureLogAnalyticsTarget.ResultFormat
			}
			// Default to time series when the frontend did not specify a format.
			if resultFormat == "" {
				resultFormat = types.TimeSeries
			}
			// Legacy queries only specify a Workspace GUID, which we need to use the old workspace-centric
			// API URL for, and newer queries specifying a resource URI should use resource-centric API.
			// However, legacy workspace queries using a `workspaces()` template variable will be resolved
			// to a resource URI, so they should use the new resource-centric.
			if len(azureLogAnalyticsTarget.Resources) > 0 {
				resources = azureLogAnalyticsTarget.Resources
				resourceOrWorkspace = azureLogAnalyticsTarget.Resources[0]
				appInsightsQuery = appInsightsRegExp.Match([]byte(resourceOrWorkspace))
			} else if azureLogAnalyticsTarget.Resource != nil && *azureLogAnalyticsTarget.Resource != "" {
				resources = []string{*azureLogAnalyticsTarget.Resource}
				resourceOrWorkspace = *azureLogAnalyticsTarget.Resource
			} else if azureLogAnalyticsTarget.Workspace != nil {
				resourceOrWorkspace = *azureLogAnalyticsTarget.Workspace
			}

			if azureLogAnalyticsTarget.Query != nil {
				queryString = *azureLogAnalyticsTarget.Query
			}

			if azureLogAnalyticsTarget.DashboardTime != nil {
				dashboardTime = *azureLogAnalyticsTarget.DashboardTime
				if dashboardTime {
					if azureLogAnalyticsTarget.TimeColumn != nil {
						timeColumn = *azureLogAnalyticsTarget.TimeColumn
					} else {
						// Final fallback to TimeGenerated if no column is provided
						timeColumn = "TimeGenerated"
					}
				}
			}
		}

		if query.QueryType == string(dataquery.AzureQueryTypeAzureTraces) {
			queryJSONModel := types.TracesJSONQuery{}
			err := json.Unmarshal(query.JSON, &queryJSONModel)
			if err != nil {
				return nil, fmt.Errorf("failed to decode the Azure Traces query object from JSON: %w", err)
			}
			azureTracesTarget := queryJSONModel.AzureTraces
			// Traces default to table format when unspecified or empty.
			if azureTracesTarget.ResultFormat == nil {
				resultFormat = types.Table
			} else {
				resultFormat = *azureTracesTarget.ResultFormat
				if resultFormat == "" {
					resultFormat = types.Table
				}
			}

			resources = azureTracesTarget.Resources
			resourceOrWorkspace = azureTracesTarget.Resources[0]
			appInsightsQuery = appInsightsRegExp.Match([]byte(resourceOrWorkspace))

			// Deduplicate the additional resources (case-insensitively).
			resourcesMap := make(map[string]bool, 0)
			if len(resources) > 1 {
				for _, resource := range resources {
					resourcesMap[strings.ToLower(resource)] = true
				}
				// Remove the base resource as that's where the query is run anyway
				delete(resourcesMap, strings.ToLower(resourceOrWorkspace))
			}

			operationId := ""
			if queryJSONModel.AzureTraces.OperationId != nil && *queryJSONModel.AzureTraces.OperationId != "" {
				operationId = *queryJSONModel.AzureTraces.OperationId
				// Expand the resource set with any correlated workspaces for this operation.
				resourcesMap, err = getCorrelationWorkspaces(ctx, resourceOrWorkspace, resourcesMap, dsInfo, operationId)
				if err != nil {
					return nil, fmt.Errorf("failed to retrieve correlation resources for operation ID - %s: %s", operationId, err)
				}
			}

			// Sort for deterministic query text (map iteration order is random).
			queryResources := make([]string, 0)
			for resource := range resourcesMap {
				queryResources = append(queryResources, resource)
			}
			sort.Strings(queryResources)

			queryString = buildTracesQuery(operationId, nil, queryJSONModel.AzureTraces.TraceTypes, queryJSONModel.AzureTraces.Filters, &resultFormat, queryResources)

			// Data-link variables are resolved per-row by the frontend.
			traceIdVariable := "${__data.fields.traceID}"
			parentSpanIdVariable := "${__data.fields.parentSpanID}"
			if operationId == "" {
				// No pinned operation: the explore queries key off the clicked row.
				traceExploreQuery = buildTracesQuery(traceIdVariable, nil, queryJSONModel.AzureTraces.TraceTypes, queryJSONModel.AzureTraces.Filters, &resultFormat, queryResources)
				traceParentExploreQuery = buildTracesQuery(traceIdVariable, &parentSpanIdVariable, queryJSONModel.AzureTraces.TraceTypes, queryJSONModel.AzureTraces.Filters, &resultFormat, queryResources)
				traceLogsExploreQuery = buildTracesLogsQuery(traceIdVariable, queryResources)
			} else {
				traceExploreQuery = queryString
				traceParentExploreQuery = buildTracesQuery(operationId, &parentSpanIdVariable, queryJSONModel.AzureTraces.TraceTypes, queryJSONModel.AzureTraces.Filters, &resultFormat, queryResources)
				traceLogsExploreQuery = buildTracesLogsQuery(operationId, queryResources)
			}

			traceExploreQuery, err = macros.KqlInterpolate(query, dsInfo, traceExploreQuery, "TimeGenerated")
			if err != nil {
				return nil, fmt.Errorf("failed to create traces explore query: %s", err)
			}
			traceParentExploreQuery, err = macros.KqlInterpolate(query, dsInfo, traceParentExploreQuery, "TimeGenerated")
			if err != nil {
				return nil, fmt.Errorf("failed to create parent span traces explore query: %s", err)
			}
			traceLogsExploreQuery, err = macros.KqlInterpolate(query, dsInfo, traceLogsExploreQuery, "TimeGenerated")
			if err != nil {
				return nil, fmt.Errorf("failed to create traces logs explore query: %s", err)
			}

			// Traces always scope to the dashboard time via the "timestamp" column.
			dashboardTime = true
			timeColumn = "timestamp"
		}

		apiURL := getApiURL(resourceOrWorkspace, appInsightsQuery)

		rawQuery, err := macros.KqlInterpolate(query, dsInfo, queryString, "TimeGenerated")
		if err != nil {
			return nil, err
		}

		azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, &AzureLogAnalyticsQuery{
			RefID:                   query.RefID,
			ResultFormat:            resultFormat,
			URL:                     apiURL,
			JSON:                    query.JSON,
			TimeRange:               query.TimeRange,
			Query:                   rawQuery,
			Resources:               resources,
			QueryType:               dataquery.AzureQueryType(query.QueryType),
			TraceExploreQuery:       traceExploreQuery,
			TraceParentExploreQuery: traceParentExploreQuery,
			TraceLogsExploreQuery:   traceLogsExploreQuery,
			AppInsightsQuery:        appInsightsQuery,
			DashboardTime:           dashboardTime,
			TimeColumn:              timeColumn,
		})
	}
	return azureLogAnalyticsQueries, nil
}
2023-10-05 07:38:26 +08:00
// executeQuery runs a single built query against the Log Analytics API,
// converts the primary result table to a data frame, sets the preferred
// visualization for the chosen result format, and attaches portal/explore
// data links.
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client, url string) (*backend.DataResponse, error) {
	// If azureLogAnalyticsSameAs is defined and set to false, return an error
	if sameAs, ok := dsInfo.JSONData["azureLogAnalyticsSameAs"]; ok && !sameAs.(bool) {
		return nil, fmt.Errorf("credentials for Log Analytics are no longer supported. Go to the data source configuration to update Azure Monitor credentials")
	}

	// NOTE(review): queryJSONModel is decoded but never read below; this only
	// rejects malformed query.JSON early — confirm before removing.
	queryJSONModel := dataquery.AzureMonitorQuery{}
	err := json.Unmarshal(query.JSON, &queryJSONModel)
	if err != nil {
		return nil, err
	}

	// The trace visualiser needs a rendered KQL query; an empty one cannot be shown.
	if query.QueryType == dataquery.AzureQueryTypeAzureTraces {
		if query.ResultFormat == dataquery.ResultFormatTrace && query.Query == "" {
			return nil, fmt.Errorf("cannot visualise trace events using the trace visualiser")
		}
	}

	req, err := e.createRequest(ctx, url, query)
	if err != nil {
		return nil, err
	}

	_, span := tracing.DefaultTracer().Start(ctx, "azure log analytics query", trace.WithAttributes(
		attribute.String("target", query.Query),
		attribute.Int64("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond)),
		attribute.Int64("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond)),
		attribute.Int64("datasource_id", dsInfo.DatasourceID),
		attribute.Int64("org_id", dsInfo.OrgID),
	))
	defer span.End()

	res, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			e.Logger.Warn("Failed to close response body", "err", err)
		}
	}()

	logResponse, err := e.unmarshalResponse(res)
	if err != nil {
		return nil, err
	}

	t, err := logResponse.GetPrimaryResultTable()
	if err != nil {
		return nil, err
	}

	frame, err := ResponseTableToFrame(t, query.RefID, query.Query, query.QueryType, query.ResultFormat)
	if err != nil {
		return nil, err
	}

	// A nil frame with no API error means there is nothing to return.
	frame = appendErrorNotice(frame, logResponse.Error)
	if frame == nil {
		dataResponse := backend.DataResponse{}
		return &dataResponse, nil
	}

	queryUrl, err := getQueryUrl(query.Query, query.Resources, dsInfo.Routes["Azure Portal"].URL, query.TimeRange)
	if err != nil {
		return nil, err
	}

	// Pick the preferred visualization for the requested result format.
	if query.QueryType == dataquery.AzureQueryTypeAzureTraces && query.ResultFormat == dataquery.ResultFormatTrace {
		frame.Meta.PreferredVisualization = data.VisTypeTrace
	}
	if query.ResultFormat == dataquery.ResultFormatTable {
		frame.Meta.PreferredVisualization = data.VisTypeTable
	}
	if query.ResultFormat == dataquery.ResultFormatLogs {
		frame.Meta.PreferredVisualization = data.VisTypeLogs
		// Preserve the column types set by ResponseTableToFrame and add the portal link.
		frame.Meta.Custom = &LogAnalyticsMeta{
			ColumnTypes:     frame.Meta.Custom.(*LogAnalyticsMeta).ColumnTypes,
			AzurePortalLink: queryUrl,
		}
	}

	if query.ResultFormat == types.TimeSeries {
		// Long-format frames are converted to wide; on failure we fall back to
		// the raw table with a warning notice instead of erroring out.
		tsSchema := frame.TimeSeriesSchema()
		if tsSchema.Type == data.TimeSeriesTypeLong {
			wideFrame, err := data.LongToWide(frame, nil)
			if err == nil {
				frame = wideFrame
			} else {
				frame.AppendNotices(data.Notice{Severity: data.NoticeSeverityWarning, Text: "could not convert frame to time series, returning raw table: " + err.Error()})
			}
		}
	}

	// Use the parent span query for the parent span data link
	err = addDataLinksToFields(query, dsInfo.Routes["Azure Portal"].URL, frame, dsInfo, queryUrl)
	if err != nil {
		return nil, err
	}

	dataResponse := backend.DataResponse{Frames: data.Frames{frame}}
	return &dataResponse, nil
}
2023-04-28 03:24:11 +08:00
2023-10-18 23:19:35 +08:00
func addDataLinksToFields ( query * AzureLogAnalyticsQuery , azurePortalBaseUrl string , frame * data . Frame , dsInfo types . DatasourceInfo , queryUrl string ) error {
if query . QueryType == dataquery . AzureQueryTypeAzureTraces {
err := addTraceDataLinksToFields ( query , azurePortalBaseUrl , frame , dsInfo )
2023-04-28 03:24:11 +08:00
if err != nil {
2023-10-18 23:19:35 +08:00
return err
2023-04-28 03:24:11 +08:00
}
2023-10-18 23:19:35 +08:00
return nil
}
2023-04-28 03:24:11 +08:00
2023-10-18 23:19:35 +08:00
if query . ResultFormat == dataquery . ResultFormatLogs {
return nil
}
AddConfigLinks ( * frame , queryUrl , nil )
return nil
}
2023-04-28 03:24:11 +08:00
2023-10-18 23:19:35 +08:00
// addTraceDataLinksToFields attaches internal data links for exploring a
// trace, its parent span, and its logs, plus an Azure portal deep link when
// the result is rendered as a table.
func addTraceDataLinksToFields(query *AzureLogAnalyticsQuery, azurePortalBaseUrl string, frame *data.Frame, dsInfo types.DatasourceInfo) error {
	tracesUrl, err := getTracesQueryUrl(query.Resources, azurePortalBaseUrl)
	if err != nil {
		return err
	}

	queryJSONModel := dataquery.AzureMonitorQuery{}
	err = json.Unmarshal(query.JSON, &queryJSONModel)
	if err != nil {
		return err
	}

	traceIdVariable := "${__data.fields.traceID}"
	resultFormat := dataquery.ResultFormatTrace
	queryJSONModel.AzureTraces.ResultFormat = &resultFormat
	queryJSONModel.AzureTraces.Query = &query.TraceExploreQuery
	// When no operation ID is pinned, fall back to the data-link variable so
	// the link resolves per-row in the frontend.
	if queryJSONModel.AzureTraces.OperationId == nil || *queryJSONModel.AzureTraces.OperationId == "" {
		queryJSONModel.AzureTraces.OperationId = &traceIdVariable
	}

	// A separate Log Analytics query model backs the "Explore Trace Logs" link.
	logsQueryType := string(dataquery.AzureQueryTypeAzureLogAnalytics)
	logsJSONModel := dataquery.AzureMonitorQuery{
		QueryType: &logsQueryType,
		AzureLogAnalytics: &dataquery.AzureLogsQuery{
			Query:     &query.TraceLogsExploreQuery,
			Resources: []string{queryJSONModel.AzureTraces.Resources[0]},
		},
	}

	if query.ResultFormat == dataquery.ResultFormatTable {
		AddCustomDataLink(*frame, data.DataLink{
			Title: "Explore Trace: ${__data.fields.traceID}",
			URL:   "",
			Internal: &data.InternalDataLink{
				DatasourceUID:  dsInfo.DatasourceUID,
				DatasourceName: dsInfo.DatasourceName,
				Query:          queryJSONModel,
			},
		})
		// The same model is reused with the parent-span query swapped in.
		queryJSONModel.AzureTraces.Query = &query.TraceParentExploreQuery
		AddCustomDataLink(*frame, data.DataLink{
			Title: "Explore Parent Span: ${__data.fields.parentSpanID}",
			URL:   "",
			Internal: &data.InternalDataLink{
				DatasourceUID:  dsInfo.DatasourceUID,
				DatasourceName: dsInfo.DatasourceName,
				Query:          queryJSONModel,
			},
		})
		linkTitle := "Explore Trace in Azure Portal"
		AddConfigLinks(*frame, tracesUrl, &linkTitle)
	}

	AddCustomDataLink(*frame, data.DataLink{
		Title: "Explore Trace Logs",
		URL:   "",
		Internal: &data.InternalDataLink{
			DatasourceUID:  dsInfo.DatasourceUID,
			DatasourceName: dsInfo.DatasourceName,
			Query:          logsJSONModel,
		},
	})
	return nil
}
2022-11-24 18:25:40 +08:00
func appendErrorNotice ( frame * data . Frame , err * AzureLogAnalyticsAPIError ) * data . Frame {
if err == nil {
return frame
}
if frame == nil {
frame = & data . Frame { }
2022-10-05 21:29:34 +08:00
}
2022-11-24 18:25:40 +08:00
frame . AppendNotices ( apiErrorToNotice ( err ) )
return frame
2022-10-05 21:29:34 +08:00
}
2023-08-30 21:39:31 +08:00
// createRequest builds the POST request for the Log Analytics query API:
// the JSON body carries the KQL query, the optional dashboard timespan, and
// the cross-resource scope (workspaces/resources/applications).
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, queryURL string, query *AzureLogAnalyticsQuery) (*http.Request, error) {
	body := map[string]interface{}{
		"query": query.Query,
	}
	if query.DashboardTime {
		// Scope the query to the dashboard's time range via the request body
		// rather than relying on time filters inside the KQL itself.
		from := query.TimeRange.From.Format(time.RFC3339)
		to := query.TimeRange.To.Format(time.RFC3339)
		timespan := fmt.Sprintf("%s/%s", from, to)
		body["timespan"] = timespan
		body["query_datetimescope_from"] = from
		body["query_datetimescope_to"] = to
		body["query_datetimescope_column"] = query.TimeColumn
	}

	if len(query.Resources) > 1 && query.QueryType == dataquery.AzureQueryTypeAzureLogAnalytics && !query.AppInsightsQuery {
		// Cross-resource queries: the API expects "workspaces" for legacy
		// workspace URIs and "resources" for anything else.
		str := strings.ToLower(query.Resources[0])
		if strings.Contains(str, "microsoft.operationalinsights/workspaces") {
			body["workspaces"] = query.Resources
		} else {
			body["resources"] = query.Resources
		}
	}

	if query.AppInsightsQuery {
		body["applications"] = query.Resources
	}

	jsonValue, err := json.Marshal(body)
	if err != nil {
		return nil, fmt.Errorf("%v: %w", "failed to create request", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, queryURL, bytes.NewBuffer(jsonValue))
	if err != nil {
		return nil, fmt.Errorf("%v: %w", "failed to create request", err)
	}
	// Reset the path first so query.URL is always joined relative to the host root.
	req.URL.Path = "/"
	req.Header.Set("Content-Type", "application/json")
	req.URL.Path = path.Join(req.URL.Path, query.URL)
	return req, nil
}
2023-03-30 00:01:18 +08:00
// AzureLogAnalyticsURLResources is the resource-scope payload encoded into the
// Azure portal deep link built by getQueryUrl.
type AzureLogAnalyticsURLResources struct {
	Resources []AzureLogAnalyticsURLResource `json:"resources"`
}

// AzureLogAnalyticsURLResource identifies a single queried resource by URI.
type AzureLogAnalyticsURLResource struct {
	ResourceID string `json:"resourceId"`
}
2023-07-19 16:15:14 +08:00
func getQueryUrl ( query string , resources [ ] string , azurePortalUrl string , timeRange backend . TimeRange ) ( string , error ) {
2023-03-30 00:01:18 +08:00
encodedQuery , err := encodeQuery ( query )
if err != nil {
return "" , fmt . Errorf ( "failed to encode the query: %s" , err )
}
portalUrl := azurePortalUrl
if err != nil {
return "" , fmt . Errorf ( "failed to parse base portal URL: %s" , err )
}
portalUrl += "/#blade/Microsoft_OperationsManagementSuite_Workspace/AnalyticsBlade/initiator/AnalyticsShareLinkToQuery/isQueryEditorVisible/true/scope/"
resourcesJson := AzureLogAnalyticsURLResources {
Resources : make ( [ ] AzureLogAnalyticsURLResource , 0 ) ,
}
for _ , resource := range resources {
resourcesJson . Resources = append ( resourcesJson . Resources , AzureLogAnalyticsURLResource {
ResourceID : resource ,
} )
}
resourcesMarshalled , err := json . Marshal ( resourcesJson )
if err != nil {
return "" , fmt . Errorf ( "failed to marshal log analytics resources: %s" , err )
}
2023-07-19 16:15:14 +08:00
from := timeRange . From . Format ( time . RFC3339 )
to := timeRange . To . Format ( time . RFC3339 )
timespan := url . QueryEscape ( fmt . Sprintf ( "%s/%s" , from , to ) )
2023-03-30 00:01:18 +08:00
portalUrl += url . QueryEscape ( string ( resourcesMarshalled ) )
2023-07-19 16:15:14 +08:00
portalUrl += "/query/" + url . PathEscape ( encodedQuery ) + "/isQueryBase64Compressed/true/timespan/" + timespan
2023-03-30 00:01:18 +08:00
return portalUrl , nil
}
2023-04-28 03:24:11 +08:00
// getTracesQueryUrl builds an Azure portal deep link to the Application
// Insights trace details blade for the first queried resource. Row-specific
// fields are filled in by frontend data-link variables.
func getTracesQueryUrl(resources []string, azurePortalUrl string) (string, error) {
	component := struct {
		ResourceId string `json:"ResourceId"`
	}{resources[0]}
	marshalled, err := json.Marshal(component)
	if err != nil {
		return "", fmt.Errorf("failed to marshal application insights resource: %s", err)
	}

	// We're making use of data link variables to select the necessary fields in the frontend
	const (
		eventId    = "%22eventId%22%3A%22${__data.fields.itemId}%22%2C"
		timestamp  = "%22timestamp%22%3A%22${__data.fields.startTime}%22%2C"
		eventTable = "%22eventTable%22%3A%22${__data.fields.itemType}%22"
	)
	traceObject := fmt.Sprintf("%%7B%s%s%s%%7D", eventId, timestamp, eventTable)

	return azurePortalUrl +
		"/#view/AppInsightsExtension/DetailsV2Blade/ComponentId~/" +
		url.PathEscape(string(marshalled)) +
		"/DataModel~/" +
		traceObject, nil
}
2023-10-05 07:38:26 +08:00
func getCorrelationWorkspaces ( ctx context . Context , baseResource string , resourcesMap map [ string ] bool , dsInfo types . DatasourceInfo , operationId string ) ( map [ string ] bool , error ) {
2023-04-28 03:24:11 +08:00
azMonService := dsInfo . Services [ "Azure Monitor" ]
2023-05-09 22:29:36 +08:00
correlationUrl := azMonService . URL + fmt . Sprintf ( "%s/providers/microsoft.insights/transactions/%s" , baseResource , operationId )
2023-04-28 03:24:11 +08:00
callCorrelationAPI := func ( url string ) ( AzureCorrelationAPIResponse , error ) {
req , err := http . NewRequestWithContext ( ctx , http . MethodPost , url , bytes . NewBuffer ( [ ] byte { } ) )
if err != nil {
return AzureCorrelationAPIResponse { } , fmt . Errorf ( "%v: %w" , "failed to create request" , err )
}
req . URL . Path = url
req . Header . Set ( "Content-Type" , "application/json" )
values := req . URL . Query ( )
values . Add ( "api-version" , "2019-10-17-preview" )
req . URL . RawQuery = values . Encode ( )
req . Method = "GET"
2023-10-05 07:38:26 +08:00
_ , span := tracing . DefaultTracer ( ) . Start ( ctx , "azure traces correlation request" , trace . WithAttributes (
2023-10-03 20:54:20 +08:00
attribute . String ( "target" , req . URL . String ( ) ) ,
attribute . Int64 ( "datasource_id" , dsInfo . DatasourceID ) ,
attribute . Int64 ( "org_id" , dsInfo . OrgID ) ,
) )
2023-04-28 03:24:11 +08:00
defer span . End ( )
res , err := azMonService . HTTPClient . Do ( req )
if err != nil {
return AzureCorrelationAPIResponse { } , err
}
body , err := io . ReadAll ( res . Body )
if err != nil {
return AzureCorrelationAPIResponse { } , err
}
2023-08-30 21:39:31 +08:00
2023-04-28 03:24:11 +08:00
defer func ( ) {
2023-08-30 21:39:31 +08:00
if err := res . Body . Close ( ) ; err != nil {
2024-01-27 06:53:55 +08:00
azMonService . Logger . Warn ( "Failed to close response body" , "err" , err )
2023-04-28 03:24:11 +08:00
}
} ( )
if res . StatusCode / 100 != 2 {
return AzureCorrelationAPIResponse { } , fmt . Errorf ( "request failed, status: %s, body: %s" , res . Status , string ( body ) )
}
var data AzureCorrelationAPIResponse
d := json . NewDecoder ( bytes . NewReader ( body ) )
d . UseNumber ( )
err = d . Decode ( & data )
if err != nil {
return AzureCorrelationAPIResponse { } , err
}
2023-05-09 22:29:36 +08:00
for _ , resource := range data . Properties . Resources {
lowerCaseResource := strings . ToLower ( resource )
if _ , ok := resourcesMap [ lowerCaseResource ] ; ! ok {
resourcesMap [ lowerCaseResource ] = true
}
}
2023-04-28 03:24:11 +08:00
return data , nil
}
var nextLink * string
var correlationResponse AzureCorrelationAPIResponse
correlationResponse , err := callCorrelationAPI ( correlationUrl )
if err != nil {
2023-05-09 22:29:36 +08:00
return nil , err
2023-04-28 03:24:11 +08:00
}
nextLink = correlationResponse . Properties . NextLink
for nextLink != nil {
correlationResponse , err := callCorrelationAPI ( correlationUrl )
if err != nil {
2023-05-09 22:29:36 +08:00
return nil , err
2023-04-28 03:24:11 +08:00
}
nextLink = correlationResponse . Properties . NextLink
}
2023-05-09 22:29:36 +08:00
// Remove the base element as that's where the query is run anyway
delete ( resourcesMap , strings . ToLower ( baseResource ) )
return resourcesMap , nil
2023-04-28 03:24:11 +08:00
}
2022-06-24 14:56:58 +08:00
// Error definition has been inferred from real data and other model definitions like
// https://github.com/Azure/azure-sdk-for-go/blob/3640559afddbad452d265b54fb1c20b30be0b062/services/preview/virtualmachineimagebuilder/mgmt/2019-05-01-preview/virtualmachineimagebuilder/models.go

// AzureLogAnalyticsAPIError is the top-level error object returned by the
// Log Analytics API. All fields are optional in the wire format.
type AzureLogAnalyticsAPIError struct {
	Details *[]AzureLogAnalyticsAPIErrorBase `json:"details,omitempty"`
	Code    *string                          `json:"code,omitempty"`
	Message *string                          `json:"message,omitempty"`
}

// AzureLogAnalyticsAPIErrorBase is a nested error detail entry.
type AzureLogAnalyticsAPIErrorBase struct {
	Code       *string                      `json:"code,omitempty"`
	Message    *string                      `json:"message,omitempty"`
	Innererror *AzureLogAnalyticsInnerError `json:"innererror,omitempty"`
}

// AzureLogAnalyticsInnerError carries the innermost error code, message, and severity.
type AzureLogAnalyticsInnerError struct {
	Code         *string `json:"code,omitempty"`
	Message      *string `json:"message,omitempty"`
	Severity     *int    `json:"severity,omitempty"`
	SeverityName *string `json:"severityName,omitempty"`
}
2022-03-02 22:41:07 +08:00
// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
	// Tables holds the result tables; the one named "PrimaryResult" carries the query output.
	Tables []types.AzureResponseTable `json:"tables"`
	// Error, when present, is surfaced to the user as a frame notice.
	Error *AzureLogAnalyticsAPIError `json:"error,omitempty"`
}
2023-04-28 03:24:11 +08:00
// AzureCorrelationAPIResponse is a page of results from the Azure Monitor
// transactions (correlation) API used by getCorrelationWorkspaces.
type AzureCorrelationAPIResponse struct {
	ID         string                                `json:"id"`
	Name       string                                `json:"name"`
	Type       string                                `json:"type"`
	Properties AzureCorrelationAPIResponseProperties `json:"properties"`
	Error      *AzureLogAnalyticsAPIError            `json:"error,omitempty"`
}

// AzureCorrelationAPIResponseProperties lists correlated resources; NextLink
// is non-nil when more pages follow.
type AzureCorrelationAPIResponseProperties struct {
	Resources []string `json:"resources"`
	NextLink  *string  `json:"nextLink,omitempty"`
}
2020-06-06 00:32:10 +08:00
// GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an
// error if there is no table by that name.
2022-03-02 22:41:07 +08:00
func ( ar * AzureLogAnalyticsResponse ) GetPrimaryResultTable ( ) ( * types . AzureResponseTable , error ) {
2020-06-06 00:32:10 +08:00
for _ , t := range ar . Tables {
if t . Name == "PrimaryResult" {
return & t , nil
}
}
2020-09-23 02:00:59 +08:00
return nil , fmt . Errorf ( "no data as PrimaryResult table is missing from the response" )
2020-06-06 00:32:10 +08:00
}
2023-08-30 21:39:31 +08:00
func ( e * AzureLogAnalyticsDatasource ) unmarshalResponse ( res * http . Response ) ( AzureLogAnalyticsResponse , error ) {
2022-08-10 21:37:51 +08:00
body , err := io . ReadAll ( res . Body )
2020-04-27 23:43:02 +08:00
if err != nil {
return AzureLogAnalyticsResponse { } , err
}
2020-12-15 16:32:06 +08:00
defer func ( ) {
2023-09-11 22:02:44 +08:00
if err := res . Body . Close ( ) ; err != nil {
2024-01-27 06:53:55 +08:00
e . Logger . Warn ( "Failed to close response body" , "err" , err )
2023-09-11 22:02:44 +08:00
}
2020-12-15 16:32:06 +08:00
} ( )
2020-04-27 23:43:02 +08:00
if res . StatusCode / 100 != 2 {
2020-11-05 18:29:39 +08:00
return AzureLogAnalyticsResponse { } , fmt . Errorf ( "request failed, status: %s, body: %s" , res . Status , string ( body ) )
2020-04-27 23:43:02 +08:00
}
var data AzureLogAnalyticsResponse
2020-06-06 00:32:10 +08:00
d := json . NewDecoder ( bytes . NewReader ( body ) )
d . UseNumber ( )
err = d . Decode ( & data )
2020-04-27 23:43:02 +08:00
if err != nil {
return AzureLogAnalyticsResponse { } , err
}
return data , nil
}
2020-06-30 04:20:24 +08:00
// LogAnalyticsMeta is a type for a Frame's Meta's Custom property.
type LogAnalyticsMeta struct {
	ColumnTypes     []string `json:"azureColumnTypes"`
	AzurePortalLink string   `json:"azurePortalLink,omitempty"`
}
2020-05-12 01:11:03 +08:00
2020-06-06 00:32:10 +08:00
// encodeQuery encodes the query in gzip so the frontend can build links.
// The raw query text is gzip-compressed and the result is returned as a
// standard base64 string.
func encodeQuery(rawQuery string) (string, error) {
	var compressed bytes.Buffer
	zw := gzip.NewWriter(&compressed)
	_, writeErr := zw.Write([]byte(rawQuery))
	// Close flushes the remaining compressed data; its error matters even
	// when the write succeeded.
	closeErr := zw.Close()
	if writeErr != nil {
		return "", writeErr
	}
	if closeErr != nil {
		return "", closeErr
	}
	return base64.StdEncoding.EncodeToString(compressed.Bytes()), nil
}
2023-04-28 03:24:11 +08:00
2023-06-11 04:30:49 +08:00
func buildTracesQuery ( operationId string , parentSpanID * string , traceTypes [ ] string , filters [ ] dataquery . AzureTracesFilter , resultFormat * dataquery . ResultFormat , resources [ ] string ) string {
2023-04-28 03:24:11 +08:00
types := traceTypes
if len ( types ) == 0 {
types = Tables
}
filteredTypes := make ( [ ] string , 0 )
// If the result format is set to trace then we filter out all events that are of the type traces as they don't make sense when visualised as a span
2023-06-11 04:30:49 +08:00
if resultFormat != nil && * resultFormat == dataquery . ResultFormatTrace {
2023-04-28 03:24:11 +08:00
filteredTypes = slices . Filter ( filteredTypes , types , func ( s string ) bool { return s != "traces" } )
} else {
filteredTypes = types
}
sort . Strings ( filteredTypes )
if len ( filteredTypes ) == 0 {
return ""
}
2023-05-09 22:29:36 +08:00
resourcesQuery := strings . Join ( filteredTypes , "," )
if len ( resources ) > 0 {
intermediate := make ( [ ] string , 0 )
2023-05-10 18:27:02 +08:00
for _ , resource := range resources {
2023-05-09 22:29:36 +08:00
for _ , table := range filteredTypes {
2023-05-17 01:30:09 +08:00
intermediate = append ( intermediate , fmt . Sprintf ( "app('%s').%s" , resource , table ) )
2023-05-09 22:29:36 +08:00
}
}
resourcesQuery += "," + strings . Join ( intermediate , "," )
}
2023-04-28 03:24:11 +08:00
tagsMap := make ( map [ string ] bool )
var tags [ ] string
for _ , t := range filteredTypes {
tableTags := getTagsForTable ( t )
for _ , i := range tableTags {
if tagsMap [ i ] {
continue
}
if i == "cloud_RoleInstance" || i == "cloud_RoleName" || i == "customDimensions" || i == "customMeasurements" {
continue
}
tags = append ( tags , i )
tagsMap [ i ] = true
}
}
sort . Strings ( tags )
whereClause := ""
if operationId != "" {
whereClause = fmt . Sprintf ( "| where (operation_Id != '' and operation_Id == '%s') or (customDimensions.ai_legacyRootId != '' and customDimensions.ai_legacyRootId == '%s')" , operationId , operationId )
}
2023-05-17 01:30:09 +08:00
parentWhereClause := ""
if parentSpanID != nil && * parentSpanID != "" {
parentWhereClause = fmt . Sprintf ( "| where (operation_ParentId != '' and operation_ParentId == '%s')" , * parentSpanID )
}
2023-04-28 03:24:11 +08:00
filtersClause := ""
if len ( filters ) > 0 {
for _ , filter := range filters {
if len ( filter . Filters ) == 0 {
continue
}
operation := "in"
if filter . Operation == "ne" {
operation = "!in"
}
filterValues := [ ] string { }
for _ , val := range filter . Filters {
filterValues = append ( filterValues , fmt . Sprintf ( ` "%s" ` , val ) )
}
filtersClause += fmt . Sprintf ( "| where %s %s (%s)" , filter . Property , operation , strings . Join ( filterValues , "," ) )
}
}
propertiesFunc := "bag_merge(customDimensions, customMeasurements)"
if len ( tags ) > 0 {
propertiesFunc = fmt . Sprintf ( "bag_merge(bag_pack_columns(%s), customDimensions, customMeasurements)" , strings . Join ( tags , "," ) )
}
2023-05-09 22:24:20 +08:00
errorProperty := ` | extend error = todynamic(iff(itemType == "exception", "true", "false")) `
2023-04-28 03:24:11 +08:00
2023-05-09 22:29:36 +08:00
baseQuery := fmt . Sprintf ( ` set truncationmaxrecords=10000; set truncationmaxsize=67108864; union isfuzzy=true %s ` , resourcesQuery )
2023-04-28 03:24:11 +08:00
propertiesStaticQuery := ` | extend duration = iff(isnull(column_ifexists("duration", real(null))), toreal(0), column_ifexists("duration", real(null))) ` +
` | extend spanID = iff(itemType == "pageView" or isempty(column_ifexists("id", "")), tostring(new_guid()), column_ifexists("id", "")) ` +
` | extend operationName = iff(isempty(column_ifexists("name", "")), column_ifexists("problemId", ""), column_ifexists("name", "")) ` +
` | extend serviceName = cloud_RoleName ` +
` | extend serviceTags = bag_pack_columns(cloud_RoleInstance, cloud_RoleName) `
propertiesQuery := fmt . Sprintf ( ` | extend tags = %s ` , propertiesFunc )
projectClause := ` | project-rename traceID = operation_Id, parentSpanID = operation_ParentId, startTime = timestamp ` +
` | project startTime, itemType, serviceName, duration, traceID, spanID, parentSpanID, operationName, serviceTags, tags, itemId ` +
` | order by startTime asc `
2023-05-17 01:30:09 +08:00
return baseQuery + whereClause + parentWhereClause + propertiesStaticQuery + errorProperty + propertiesQuery + filtersClause + projectClause
2023-04-28 03:24:11 +08:00
}
2023-05-10 18:27:02 +08:00
func buildTracesLogsQuery ( operationId string , resources [ ] string ) string {
2023-05-09 22:29:36 +08:00
types := Tables
sort . Strings ( types )
selectors := "union " + strings . Join ( types , ",\n" ) + "\n"
if len ( resources ) > 0 {
intermediate := make ( [ ] string , 0 )
2023-05-10 18:27:02 +08:00
for _ , resource := range resources {
2023-05-09 22:29:36 +08:00
for _ , table := range types {
2023-05-17 01:30:09 +08:00
intermediate = append ( intermediate , fmt . Sprintf ( "app('%s').%s" , resource , table ) )
2023-05-09 22:29:36 +08:00
}
}
sort . Strings ( intermediate )
types = intermediate
selectors = strings . Join ( append ( [ ] string { "union *" } , types ... ) , ",\n" ) + "\n"
}
query := selectors
query += fmt . Sprintf ( ` | where operation_Id == "%s" ` , operationId )
return query
2023-04-28 03:24:11 +08:00
}