2022-03-02 22:41:07 +08:00
package loganalytics
2020-04-27 23:43:02 +08:00
import (
2020-05-12 01:11:03 +08:00
"bytes"
"compress/gzip"
2020-04-27 23:43:02 +08:00
"context"
"encoding/json"
"fmt"
2022-08-10 21:37:51 +08:00
"io"
2020-04-27 23:43:02 +08:00
"net/http"
"net/url"
"path"
2021-05-20 16:16:29 +08:00
"regexp"
2021-06-07 20:54:51 +08:00
"time"
2020-04-27 23:43:02 +08:00
2021-06-07 20:54:51 +08:00
"github.com/grafana/grafana-plugin-sdk-go/backend"
2020-06-06 00:32:10 +08:00
"github.com/grafana/grafana-plugin-sdk-go/data"
2022-04-12 02:20:10 +08:00
"go.opentelemetry.io/otel/attribute"
2020-04-27 23:43:02 +08:00
"github.com/grafana/grafana/pkg/components/simplejson"
2022-11-04 21:28:38 +08:00
"github.com/grafana/grafana/pkg/infra/log"
2022-01-20 18:10:12 +08:00
"github.com/grafana/grafana/pkg/infra/tracing"
2022-03-02 22:41:07 +08:00
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
2020-04-27 23:43:02 +08:00
)
// AzureLogAnalyticsDatasource calls the Azure Log Analytics API's
2021-07-16 18:47:26 +08:00
type AzureLogAnalyticsDatasource struct {
2022-03-02 22:41:07 +08:00
Proxy types . ServiceProxy
2021-07-16 18:47:26 +08:00
}
2020-04-27 23:43:02 +08:00
// AzureLogAnalyticsQuery is the query request that is built from the saved values for
// from the UI
type AzureLogAnalyticsQuery struct {
RefID string
ResultFormat string
URL string
2021-06-07 20:54:51 +08:00
JSON json . RawMessage
2020-04-27 23:43:02 +08:00
Params url . Values
Target string
2021-06-07 20:54:51 +08:00
TimeRange backend . TimeRange
2020-04-27 23:43:02 +08:00
}
2022-03-02 22:41:07 +08:00
func ( e * AzureLogAnalyticsDatasource ) ResourceRequest ( rw http . ResponseWriter , req * http . Request , cli * http . Client ) {
e . Proxy . Do ( rw , req , cli )
2021-07-16 18:47:26 +08:00
}
2020-04-27 23:43:02 +08:00
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
2021-06-07 20:54:51 +08:00
// 3. parses the responses for each query into data frames
2022-11-04 21:28:38 +08:00
func ( e * AzureLogAnalyticsDatasource ) ExecuteTimeSeriesQuery ( ctx context . Context , logger log . Logger , originalQueries [ ] backend . DataQuery , dsInfo types . DatasourceInfo , client * http . Client , url string , tracer tracing . Tracer ) ( * backend . QueryDataResponse , error ) {
2021-06-07 20:54:51 +08:00
result := backend . NewQueryDataResponse ( )
2022-11-04 21:28:38 +08:00
ctxLogger := logger . FromContext ( ctx )
queries , err := e . buildQueries ( ctxLogger , originalQueries , dsInfo )
2020-04-27 23:43:02 +08:00
if err != nil {
2021-06-07 20:54:51 +08:00
return nil , err
2020-04-27 23:43:02 +08:00
}
for _ , query := range queries {
2022-11-04 21:28:38 +08:00
result . Responses [ query . RefID ] = e . executeQuery ( ctx , ctxLogger , query , dsInfo , client , url , tracer )
2020-04-27 23:43:02 +08:00
}
return result , nil
}
2022-03-02 22:41:07 +08:00
func getApiURL ( queryJSONModel types . LogJSONQuery ) string {
2021-05-20 16:16:29 +08:00
// Legacy queries only specify a Workspace GUID, which we need to use the old workspace-centric
// API URL for, and newer queries specifying a resource URI should use resource-centric API.
// However, legacy workspace queries using a `workspaces()` template variable will be resolved
// to a resource URI, so they should use the new resource-centric.
azureLogAnalyticsTarget := queryJSONModel . AzureLogAnalytics
var resourceOrWorkspace string
if azureLogAnalyticsTarget . Resource != "" {
resourceOrWorkspace = azureLogAnalyticsTarget . Resource
} else {
resourceOrWorkspace = azureLogAnalyticsTarget . Workspace
}
matchesResourceURI , _ := regexp . MatchString ( "^/subscriptions/" , resourceOrWorkspace )
if matchesResourceURI {
return fmt . Sprintf ( "v1%s/query" , resourceOrWorkspace )
} else {
return fmt . Sprintf ( "v1/workspaces/%s/query" , resourceOrWorkspace )
}
}
2022-11-04 21:28:38 +08:00
func ( e * AzureLogAnalyticsDatasource ) buildQueries ( logger log . Logger , queries [ ] backend . DataQuery , dsInfo types . DatasourceInfo ) ( [ ] * AzureLogAnalyticsQuery , error ) {
2020-04-27 23:43:02 +08:00
azureLogAnalyticsQueries := [ ] * AzureLogAnalyticsQuery { }
for _ , query := range queries {
2022-03-02 22:41:07 +08:00
queryJSONModel := types . LogJSONQuery { }
2021-06-07 20:54:51 +08:00
err := json . Unmarshal ( query . JSON , & queryJSONModel )
2020-05-26 22:52:33 +08:00
if err != nil {
return nil , fmt . Errorf ( "failed to decode the Azure Log Analytics query object from JSON: %w" , err )
}
azureLogAnalyticsTarget := queryJSONModel . AzureLogAnalytics
2022-11-04 21:28:38 +08:00
logger . Debug ( "AzureLogAnalytics" , "target" , azureLogAnalyticsTarget )
2020-04-27 23:43:02 +08:00
2020-05-26 22:52:33 +08:00
resultFormat := azureLogAnalyticsTarget . ResultFormat
2020-04-27 23:43:02 +08:00
if resultFormat == "" {
2022-03-02 22:41:07 +08:00
resultFormat = types . TimeSeries
2020-04-27 23:43:02 +08:00
}
2021-05-20 16:16:29 +08:00
apiURL := getApiURL ( queryJSONModel )
2020-04-27 23:43:02 +08:00
params := url . Values { }
2022-11-04 21:28:38 +08:00
rawQuery , err := macros . KqlInterpolate ( logger , query , dsInfo , azureLogAnalyticsTarget . Query , "TimeGenerated" )
2020-04-27 23:43:02 +08:00
if err != nil {
return nil , err
}
params . Add ( "query" , rawQuery )
azureLogAnalyticsQueries = append ( azureLogAnalyticsQueries , & AzureLogAnalyticsQuery {
2021-03-08 14:02:49 +08:00
RefID : query . RefID ,
2020-04-27 23:43:02 +08:00
ResultFormat : resultFormat ,
URL : apiURL ,
2021-06-07 20:54:51 +08:00
JSON : query . JSON ,
2020-04-27 23:43:02 +08:00
Params : params ,
Target : params . Encode ( ) ,
2021-06-07 20:54:51 +08:00
TimeRange : query . TimeRange ,
2020-04-27 23:43:02 +08:00
} )
}
return azureLogAnalyticsQueries , nil
}
2022-11-04 21:28:38 +08:00
func ( e * AzureLogAnalyticsDatasource ) executeQuery ( ctx context . Context , logger log . Logger , query * AzureLogAnalyticsQuery , dsInfo types . DatasourceInfo , client * http . Client ,
2022-01-20 18:10:12 +08:00
url string , tracer tracing . Tracer ) backend . DataResponse {
2021-06-07 20:54:51 +08:00
dataResponse := backend . DataResponse { }
2020-04-27 23:43:02 +08:00
2021-06-07 20:54:51 +08:00
dataResponseErrorWithExecuted := func ( err error ) backend . DataResponse {
dataResponse . Error = err
dataResponse . Frames = data . Frames {
2020-07-01 04:05:53 +08:00
& data . Frame {
RefID : query . RefID ,
Meta : & data . FrameMeta {
ExecutedQueryString : query . Params . Get ( "query" ) ,
} ,
} ,
}
2021-06-07 20:54:51 +08:00
return dataResponse
2020-06-06 00:32:10 +08:00
}
2021-07-14 15:53:24 +08:00
// If azureLogAnalyticsSameAs is defined and set to false, return an error
if sameAs , ok := dsInfo . JSONData [ "azureLogAnalyticsSameAs" ] ; ok && ! sameAs . ( bool ) {
2022-06-24 14:56:58 +08:00
return dataResponseErrorWithExecuted ( fmt . Errorf ( "credentials for Log Analytics are no longer supported. Go to the data source configuration to update Azure Monitor credentials" ) )
2021-07-14 15:53:24 +08:00
}
2022-11-04 21:28:38 +08:00
req , err := e . createRequest ( ctx , logger , url )
2020-04-27 23:43:02 +08:00
if err != nil {
2021-06-07 20:54:51 +08:00
dataResponse . Error = err
return dataResponse
2020-04-27 23:43:02 +08:00
}
req . URL . Path = path . Join ( req . URL . Path , query . URL )
req . URL . RawQuery = query . Params . Encode ( )
2022-01-20 18:10:12 +08:00
ctx , span := tracer . Start ( ctx , "azure log analytics query" )
span . SetAttributes ( "target" , query . Target , attribute . Key ( "target" ) . String ( query . Target ) )
span . SetAttributes ( "from" , query . TimeRange . From . UnixNano ( ) / int64 ( time . Millisecond ) , attribute . Key ( "from" ) . Int64 ( query . TimeRange . From . UnixNano ( ) / int64 ( time . Millisecond ) ) )
span . SetAttributes ( "until" , query . TimeRange . To . UnixNano ( ) / int64 ( time . Millisecond ) , attribute . Key ( "until" ) . Int64 ( query . TimeRange . To . UnixNano ( ) / int64 ( time . Millisecond ) ) )
span . SetAttributes ( "datasource_id" , dsInfo . DatasourceID , attribute . Key ( "datasource_id" ) . Int64 ( dsInfo . DatasourceID ) )
span . SetAttributes ( "org_id" , dsInfo . OrgID , attribute . Key ( "org_id" ) . Int64 ( dsInfo . OrgID ) )
2020-04-27 23:43:02 +08:00
2022-01-20 18:10:12 +08:00
defer span . End ( )
2020-04-27 23:43:02 +08:00
2022-01-20 18:10:12 +08:00
tracer . Inject ( ctx , req . Header , span )
2020-04-27 23:43:02 +08:00
2022-11-04 21:28:38 +08:00
logger . Debug ( "AzureLogAnalytics" , "Request ApiURL" , req . URL . String ( ) )
2022-04-12 02:20:10 +08:00
res , err := client . Do ( req )
2020-04-27 23:43:02 +08:00
if err != nil {
2021-06-07 20:54:51 +08:00
return dataResponseErrorWithExecuted ( err )
2020-04-27 23:43:02 +08:00
}
2022-11-04 21:28:38 +08:00
logResponse , err := e . unmarshalResponse ( logger , res )
2020-04-27 23:43:02 +08:00
if err != nil {
2021-06-07 20:54:51 +08:00
return dataResponseErrorWithExecuted ( err )
2020-04-27 23:43:02 +08:00
}
2020-06-06 00:32:10 +08:00
t , err := logResponse . GetPrimaryResultTable ( )
if err != nil {
2021-06-07 20:54:51 +08:00
return dataResponseErrorWithExecuted ( err )
2020-06-06 00:32:10 +08:00
}
2020-04-27 23:43:02 +08:00
2022-10-05 21:29:34 +08:00
frame , err := ResponseTableToFrame ( t , query . RefID , query . Params . Get ( "query" ) )
2020-06-06 00:32:10 +08:00
if err != nil {
2021-06-07 20:54:51 +08:00
return dataResponseErrorWithExecuted ( err )
}
2022-10-05 21:29:34 +08:00
appendErrorNotice ( frame , logResponse . Error )
2021-06-07 20:54:51 +08:00
model , err := simplejson . NewJson ( query . JSON )
if err != nil {
return dataResponseErrorWithExecuted ( err )
2020-04-27 23:43:02 +08:00
}
2020-06-30 04:20:24 +08:00
err = setAdditionalFrameMeta ( frame ,
2020-06-06 00:32:10 +08:00
query . Params . Get ( "query" ) ,
2021-06-07 20:54:51 +08:00
model . Get ( "subscriptionId" ) . MustString ( ) ,
model . Get ( "azureLogAnalytics" ) . Get ( "workspace" ) . MustString ( ) )
2020-06-30 04:20:24 +08:00
if err != nil {
frame . AppendNotices ( data . Notice { Severity : data . NoticeSeverityWarning , Text : "could not add custom metadata: " + err . Error ( ) } )
2022-11-04 21:28:38 +08:00
logger . Warn ( "failed to add custom metadata to azure log analytics response" , err )
2020-06-30 04:20:24 +08:00
}
2020-06-06 00:32:10 +08:00
2022-03-02 22:41:07 +08:00
if query . ResultFormat == types . TimeSeries {
2020-06-06 00:32:10 +08:00
tsSchema := frame . TimeSeriesSchema ( )
if tsSchema . Type == data . TimeSeriesTypeLong {
2020-08-19 22:42:54 +08:00
wideFrame , err := data . LongToWide ( frame , nil )
2020-06-06 00:32:10 +08:00
if err == nil {
frame = wideFrame
} else {
frame . AppendNotices ( data . Notice { Severity : data . NoticeSeverityWarning , Text : "could not convert frame to time series, returning raw table: " + err . Error ( ) } )
}
}
}
2021-06-29 16:39:28 +08:00
2021-06-07 20:54:51 +08:00
dataResponse . Frames = data . Frames { frame }
return dataResponse
2020-04-27 23:43:02 +08:00
}
2022-10-05 21:29:34 +08:00
func appendErrorNotice ( frame * data . Frame , err * AzureLogAnalyticsAPIError ) {
if err != nil {
frame . AppendNotices ( apiErrorToNotice ( err ) )
}
}
2022-11-04 21:28:38 +08:00
func ( e * AzureLogAnalyticsDatasource ) createRequest ( ctx context . Context , logger log . Logger , url string ) ( * http . Request , error ) {
2022-04-12 02:20:10 +08:00
req , err := http . NewRequestWithContext ( ctx , http . MethodGet , url , nil )
2020-04-27 23:43:02 +08:00
if err != nil {
2022-11-04 21:28:38 +08:00
logger . Debug ( "Failed to create request" , "error" , err )
2022-06-03 15:24:24 +08:00
return nil , fmt . Errorf ( "%v: %w" , "failed to create request" , err )
2020-04-27 23:43:02 +08:00
}
2021-06-11 23:02:24 +08:00
req . URL . Path = "/"
2020-04-27 23:43:02 +08:00
req . Header . Set ( "Content-Type" , "application/json" )
return req , nil
}
// Error definition has been inferred from real data and other model definitions like
// https://github.com/Azure/azure-sdk-for-go/blob/3640559afddbad452d265b54fb1c20b30be0b062/services/preview/virtualmachineimagebuilder/mgmt/2019-05-01-preview/virtualmachineimagebuilder/models.go

// AzureLogAnalyticsAPIError is the top-level error envelope the Azure Log
// Analytics API returns. All fields are optional pointers so that absent JSON
// keys are distinguishable from zero values.
type AzureLogAnalyticsAPIError struct {
	Details *[]AzureLogAnalyticsAPIErrorBase `json:"details,omitempty"`
	Code    *string                          `json:"code,omitempty"`
	Message *string                          `json:"message,omitempty"`
}

// AzureLogAnalyticsAPIErrorBase is a single entry in the error details list.
type AzureLogAnalyticsAPIErrorBase struct {
	Code       *string                      `json:"code,omitempty"`
	Message    *string                      `json:"message,omitempty"`
	Innererror *AzureLogAnalyticsInnerError `json:"innererror,omitempty"`
}

// AzureLogAnalyticsInnerError carries the innermost error description,
// including a severity level and its human-readable name.
type AzureLogAnalyticsInnerError struct {
	Code         *string `json:"code,omitempty"`
	Message      *string `json:"message,omitempty"`
	Severity     *int    `json:"severity,omitempty"`
	SeverityName *string `json:"severityName,omitempty"`
}
2022-03-02 22:41:07 +08:00
// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
Tables [ ] types . AzureResponseTable ` json:"tables" `
2022-06-24 14:56:58 +08:00
Error * AzureLogAnalyticsAPIError ` json:"error,omitempty" `
2022-03-02 22:41:07 +08:00
}
2020-06-06 00:32:10 +08:00
// GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an
// error if there is no table by that name.
2022-03-02 22:41:07 +08:00
func ( ar * AzureLogAnalyticsResponse ) GetPrimaryResultTable ( ) ( * types . AzureResponseTable , error ) {
2020-06-06 00:32:10 +08:00
for _ , t := range ar . Tables {
if t . Name == "PrimaryResult" {
return & t , nil
}
}
2020-09-23 02:00:59 +08:00
return nil , fmt . Errorf ( "no data as PrimaryResult table is missing from the response" )
2020-06-06 00:32:10 +08:00
}
2022-11-04 21:28:38 +08:00
func ( e * AzureLogAnalyticsDatasource ) unmarshalResponse ( logger log . Logger , res * http . Response ) ( AzureLogAnalyticsResponse , error ) {
2022-08-10 21:37:51 +08:00
body , err := io . ReadAll ( res . Body )
2020-04-27 23:43:02 +08:00
if err != nil {
return AzureLogAnalyticsResponse { } , err
}
2020-12-15 16:32:06 +08:00
defer func ( ) {
if err := res . Body . Close ( ) ; err != nil {
2022-11-04 21:28:38 +08:00
logger . Warn ( "Failed to close response body" , "err" , err )
2020-12-15 16:32:06 +08:00
}
} ( )
2020-04-27 23:43:02 +08:00
if res . StatusCode / 100 != 2 {
2022-11-04 21:28:38 +08:00
logger . Debug ( "Request failed" , "status" , res . Status , "body" , string ( body ) )
2020-11-05 18:29:39 +08:00
return AzureLogAnalyticsResponse { } , fmt . Errorf ( "request failed, status: %s, body: %s" , res . Status , string ( body ) )
2020-04-27 23:43:02 +08:00
}
var data AzureLogAnalyticsResponse
2020-06-06 00:32:10 +08:00
d := json . NewDecoder ( bytes . NewReader ( body ) )
d . UseNumber ( )
err = d . Decode ( & data )
2020-04-27 23:43:02 +08:00
if err != nil {
2022-11-04 21:28:38 +08:00
logger . Debug ( "Failed to unmarshal Azure Log Analytics response" , "error" , err , "status" , res . Status , "body" , string ( body ) )
2020-04-27 23:43:02 +08:00
return AzureLogAnalyticsResponse { } , err
}
return data , nil
}
// LogAnalyticsMeta is a type for the a Frame's Meta's Custom property.
type LogAnalyticsMeta struct {
	ColumnTypes  []string `json:"azureColumnTypes"` // Azure-side column types of the result table.
	Subscription string   `json:"subscription"`     // Subscription ID the query ran against.
	Workspace    string   `json:"workspace"`        // Workspace GUID the query ran against.
	EncodedQuery []byte   `json:"encodedQuery"`     // EncodedQuery is used for deep links.
}
func setAdditionalFrameMeta ( frame * data . Frame , query , subscriptionID , workspace string ) error {
2022-11-23 23:15:18 +08:00
if frame . Meta == nil || frame . Meta . Custom == nil {
// empty response
return nil
}
2020-06-06 00:32:10 +08:00
frame . Meta . ExecutedQueryString = query
2020-06-30 04:20:24 +08:00
la , ok := frame . Meta . Custom . ( * LogAnalyticsMeta )
if ! ok {
return fmt . Errorf ( "unexpected type found for frame's custom metadata" )
}
la . Subscription = subscriptionID
la . Workspace = workspace
2020-06-06 00:32:10 +08:00
encodedQuery , err := encodeQuery ( query )
if err == nil {
2020-06-30 04:20:24 +08:00
la . EncodedQuery = encodedQuery
return nil
2020-04-27 23:43:02 +08:00
}
2020-06-30 04:20:24 +08:00
return fmt . Errorf ( "failed to encode the query into the encodedQuery property" )
2020-04-27 23:43:02 +08:00
}
// encodeQuery encodes the query in gzip so the frontend can build links.
func encodeQuery(rawQuery string) ([]byte, error) {
	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)

	if _, err := zw.Write([]byte(rawQuery)); err != nil {
		return nil, err
	}
	// Close flushes the remaining compressed data and the gzip footer.
	if err := zw.Close(); err != nil {
		return nil, err
	}

	return buf.Bytes(), nil
}