Azure Monitor: Remove infra logger (#73652)

* removed infra logs

* improve health check

* remove debug and error logs

* feedback

* Update pkg/tsdb/azuremonitor/azuremonitor-resource-handler.go

* Update pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go

* fix close body error

* update test

* resource request should return errors

* go linter

* go linter
Andrew Hackmann, 2023-08-30 08:39:31 -05:00 (committed by GitHub)
parent 26d5afecaf
commit 2a835301c3
13 changed files with 260 additions and 320 deletions
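The change is mechanical but broad: the package-level infra logger (`var logger = log.New("tsdb.azuremonitor")`) is removed, the executors and the HTTP proxy return errors to their callers, and the few remaining log lines go through the plugin SDK's `backend.Logger`. The standalone Go sketch below (simplified types and a throwaway test server, not the Grafana code itself) illustrates the calling pattern implied by the new `Do`/`ResourceRequest` signatures: the caller decides how to report a failure instead of relying on a shared logger.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// proxy mirrors the shape of the updated httpServiceProxy.Do: instead of
// logging write failures, it returns them to the caller.
type proxy struct{}

func (p proxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
	res, err := cli.Do(req)
	if err != nil {
		rw.WriteHeader(http.StatusInternalServerError)
		if _, werr := rw.Write([]byte(fmt.Sprintf("unexpected error %v", err))); werr != nil {
			return nil, fmt.Errorf("unable to write HTTP response: %v", werr)
		}
		return nil, err
	}
	defer func() { _ = res.Body.Close() }()
	rw.WriteHeader(res.StatusCode)
	return rw, nil
}

func main() {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	}))
	defer srv.Close()

	// Error from NewRequest ignored for brevity in this sketch.
	req, _ := http.NewRequest(http.MethodGet, srv.URL, nil)
	rec := httptest.NewRecorder()

	// The caller now decides how to report a failure (for example by writing
	// a 500 response) rather than relying on a package-level logger.
	if _, err := (proxy{}).Do(rec, req, srv.Client()); err != nil {
		fmt.Println("resource request failed:", err)
		return
	}
	fmt.Println("proxied status:", rec.Code)
}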

View File

@@ -7,6 +7,7 @@ import (
 	"net/url"
 	"strings"
 
+	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
@@ -25,19 +26,19 @@ func getTarget(original string) (target string, err error) {
 type httpServiceProxy struct {
 }
 
-func (s *httpServiceProxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter {
+func (s *httpServiceProxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
 	res, err := cli.Do(req)
 	if err != nil {
 		rw.WriteHeader(http.StatusInternalServerError)
 		_, err = rw.Write([]byte(fmt.Sprintf("unexpected error %v", err)))
 		if err != nil {
-			logger.Error("Unable to write HTTP response", "error", err)
+			return nil, fmt.Errorf("unable to write HTTP response: %v", err)
 		}
-		return nil
+		return nil, err
 	}
 
 	defer func() {
 		if err := res.Body.Close(); err != nil {
-			logger.Warn("Failed to close response body", "err", err)
+			backend.Logger.Warn("Failed to close response body", "err", err)
 		}
 	}()
@@ -46,14 +47,14 @@ func (s *httpServiceProxy) Do(rw http.ResponseWriter, req *http.Request, cli *ht
 		rw.WriteHeader(http.StatusInternalServerError)
 		_, err = rw.Write([]byte(fmt.Sprintf("unexpected error %v", err)))
 		if err != nil {
-			logger.Error("Unable to write HTTP response", "error", err)
+			return nil, fmt.Errorf("unable to write HTTP response: %v", err)
 		}
-		return nil
+		return nil, err
 	}
 	rw.WriteHeader(res.StatusCode)
 	_, err = rw.Write(body)
 	if err != nil {
-		logger.Error("Unable to write HTTP response", "error", err)
+		return nil, fmt.Errorf("unable to write HTTP response: %v", err)
 	}
 
 	for k, v := range res.Header {
@@ -63,7 +64,7 @@ func (s *httpServiceProxy) Do(rw http.ResponseWriter, req *http.Request, cli *ht
 		}
 	}
 	// Returning the response write for testing purposes
-	return rw
+	return rw, nil
 }
 
 func (s *Service) getDataSourceFromHTTPReq(req *http.Request) (types.DatasourceInfo, error) {
@@ -84,13 +85,13 @@ func writeResponse(rw http.ResponseWriter, code int, msg string) {
 	rw.WriteHeader(http.StatusBadRequest)
 	_, err := rw.Write([]byte(msg))
 	if err != nil {
-		logger.Error("Unable to write HTTP response", "error", err)
+		backend.Logger.Error("Unable to write HTTP response", "error", err)
 	}
 }
 
 func (s *Service) handleResourceReq(subDataSource string) func(rw http.ResponseWriter, req *http.Request) {
 	return func(rw http.ResponseWriter, req *http.Request) {
-		logger.Debug("Received resource call", "url", req.URL.String(), "method", req.Method)
+		backend.Logger.Debug("Received resource call", "url", req.URL.String(), "method", req.Method)
 
 		newPath, err := getTarget(req.URL.Path)
 		if err != nil {
@@ -114,7 +115,11 @@ func (s *Service) handleResourceReq(subDataSource string) func(rw http.ResponseW
 		req.URL.Host = serviceURL.Host
 		req.URL.Scheme = serviceURL.Scheme
 
-		s.executors[subDataSource].ResourceRequest(rw, req, service.HTTPClient)
+		rw, err = s.executors[subDataSource].ResourceRequest(rw, req, service.HTTPClient)
+		if err != nil {
+			writeResponse(rw, http.StatusInternalServerError, fmt.Sprintf("unexpected error %v", err))
+			return
+		}
 	}
 }

View File

@@ -66,7 +66,10 @@ func Test_proxyRequest(t *testing.T) {
			}
			rw := httptest.NewRecorder()
			proxy := httpServiceProxy{}
-			res := proxy.Do(rw, req, srv.Client())
+			res, err := proxy.Do(rw, req, srv.Client())
+			if err != nil {
+				t.Error(err)
+			}
			if res.Header().Get("foo") != "bar" {
				t.Errorf("Unexpected headers: %v", res.Header())
			}
@@ -90,9 +93,9 @@ type fakeProxy struct {
 	requestedURL string
 }
 
-func (s *fakeProxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter {
+func (s *fakeProxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
 	s.requestedURL = req.URL.String()
-	return nil
+	return nil, nil
 }
 
 func Test_handleResourceReq(t *testing.T) {

View File

@@ -17,7 +17,6 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
 	"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
 
-	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/infra/tracing"
 	"github.com/grafana/grafana/pkg/services/featuremgmt"
 	"github.com/grafana/grafana/pkg/setting"
@@ -27,8 +26,6 @@ import (
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
 )
 
-var logger = log.New("tsdb.azuremonitor")
-
 func ProvideService(cfg *setting.Cfg, httpClientProvider *httpclient.Provider, features featuremgmt.FeatureToggles, tracer tracing.Tracer) *Service {
 	proxy := &httpServiceProxy{}
 	executors := map[string]azDatasourceExecutor{
@@ -159,8 +156,8 @@ func getAzureRoutes(cloud string, jsonData json.RawMessage) (map[string]types.Az
 }
 
 type azDatasourceExecutor interface {
-	ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error)
-	ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client)
+	ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error)
+	ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error)
 }
 
 func (s *Service) getDataSourceFromPluginReq(ctx context.Context, req *backend.QueryDataRequest) (types.DatasourceInfo, error) {
@@ -194,7 +191,7 @@ func (s *Service) newQueryMux() *datasource.QueryTypeMux {
			if !ok {
				return nil, fmt.Errorf("missing service for %s", dst)
			}
-			return executor.ExecuteTimeSeriesQuery(ctx, logger, req.Queries, dsInfo, service.HTTPClient, service.URL, s.tracer)
+			return executor.ExecuteTimeSeriesQuery(ctx, req.Queries, dsInfo, service.HTTPClient, service.URL, s.tracer)
		})
 	}
 	return mux
@@ -214,7 +211,7 @@ func (s *Service) getDSInfo(ctx context.Context, pluginCtx backend.PluginContext
 	return instance, nil
 }
 
-func checkAzureMonitorMetricsHealth(dsInfo types.DatasourceInfo) (*http.Response, error) {
+func queryMetricHealth(dsInfo types.DatasourceInfo) (*http.Response, error) {
 	subscriptionsApiVersion := "2020-01-01"
 	url := fmt.Sprintf("%v/subscriptions?api-version=%v", dsInfo.Routes["Azure Monitor"].URL, subscriptionsApiVersion)
 	request, err := http.NewRequest(http.MethodGet, url, nil)
@@ -298,6 +295,93 @@ func checkAzureMonitorResourceGraphHealth(dsInfo types.DatasourceInfo, subscript
 	return res, nil
 }
 
+func metricCheckHealth(dsInfo types.DatasourceInfo) (message string, defaultSubscription string, status backend.HealthStatus) {
+	defaultSubscription = dsInfo.Settings.SubscriptionId
+	metricsRes, err := queryMetricHealth(dsInfo)
+	if err != nil {
+		if ok := errors.Is(err, types.ErrorAzureHealthCheck); ok {
+			return fmt.Sprintf("Error connecting to Azure Monitor endpoint: %s", err.Error()), defaultSubscription, backend.HealthStatusError
+		}
+		return err.Error(), defaultSubscription, backend.HealthStatusError
+	}
+	defer func() {
+		err := metricsRes.Body.Close()
+		if err != nil {
+			message += err.Error()
+			status = backend.HealthStatusError
+		}
+	}()
+	if metricsRes.StatusCode != 200 {
+		body, err := io.ReadAll(metricsRes.Body)
+		if err != nil {
+			return err.Error(), defaultSubscription, backend.HealthStatusError
+		}
+		return fmt.Sprintf("Error connecting to Azure Monitor endpoint: %s", string(body)), defaultSubscription, backend.HealthStatusError
+	}
+	subscriptions, err := parseSubscriptions(metricsRes)
+	if err != nil {
+		return err.Error(), defaultSubscription, backend.HealthStatusError
+	}
+	if defaultSubscription == "" && len(subscriptions) > 0 {
+		defaultSubscription = subscriptions[0]
+	}
+
+	return "Successfully connected to Azure Monitor endpoint.", defaultSubscription, backend.HealthStatusOk
+}
+
+func logAnalyticsCheckHealth(dsInfo types.DatasourceInfo, defaultSubscription string) (message string, status backend.HealthStatus) {
+	logsRes, err := checkAzureLogAnalyticsHealth(dsInfo, defaultSubscription)
+	if err != nil {
+		if err.Error() == "no default workspace found" {
+			return "No Log Analytics workspaces found.", backend.HealthStatusUnknown
+		}
+		if ok := errors.Is(err, types.ErrorAzureHealthCheck); ok {
+			return fmt.Sprintf("Error connecting to Azure Log Analytics endpoint: %s", err.Error()), backend.HealthStatusUnknown
+		}
+		return err.Error(), backend.HealthStatusError
+	}
+	defer func() {
+		err := logsRes.Body.Close()
+		if err != nil {
+			message += err.Error()
+			status = backend.HealthStatusError
+		}
+	}()
+	if logsRes.StatusCode != 200 {
+		body, err := io.ReadAll(logsRes.Body)
+		if err != nil {
+			return err.Error(), backend.HealthStatusError
+		}
+		return fmt.Sprintf("Error connecting to Azure Log Analytics endpoint: %s", string(body)), backend.HealthStatusError
+	}
+
+	return "Successfully connected to Azure Log Analytics endpoint.", backend.HealthStatusOk
+}
+
+func graphLogHealthCheck(dsInfo types.DatasourceInfo, defaultSubscription string) (message string, status backend.HealthStatus) {
+	resourceGraphRes, err := checkAzureMonitorResourceGraphHealth(dsInfo, defaultSubscription)
+	if err != nil {
+		if ok := errors.Is(err, types.ErrorAzureHealthCheck); ok {
+			return fmt.Sprintf("Error connecting to Azure Resource Graph endpoint: %s", err.Error()), backend.HealthStatusError
+		}
+		return err.Error(), backend.HealthStatusError
+	}
+	defer func() {
+		err := resourceGraphRes.Body.Close()
+		if err != nil {
+			message += err.Error()
+			status = backend.HealthStatusError
+		}
+	}()
+	if resourceGraphRes.StatusCode != 200 {
+		body, err := io.ReadAll(resourceGraphRes.Body)
+		if err != nil {
+			return err.Error(), backend.HealthStatusError
+		}
+		return fmt.Sprintf("Error connecting to Azure Resource Graph endpoint: %s", string(body)), backend.HealthStatusError
+	}
+
+	return "Successfully connected to Azure Resource Graph endpoint.", backend.HealthStatusOk
+}
+
 func parseSubscriptions(res *http.Response) ([]string, error) {
 	var target struct {
 		Value []struct {
@@ -331,93 +415,21 @@ func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthReque
 	}
 
 	status := backend.HealthStatusOk
-	metricsLog := "Successfully connected to Azure Monitor endpoint."
-	logAnalyticsLog := "Successfully connected to Azure Log Analytics endpoint."
-	graphLog := "Successfully connected to Azure Resource Graph endpoint."
-	defaultSubscription := dsInfo.Settings.SubscriptionId
 
-	metricsRes, err := checkAzureMonitorMetricsHealth(dsInfo)
-	if err != nil || metricsRes.StatusCode != 200 {
-		status = backend.HealthStatusError
-		if err != nil {
-			if ok := errors.Is(err, types.ErrorAzureHealthCheck); ok {
-				metricsLog = fmt.Sprintf("Error connecting to Azure Monitor endpoint: %s", err.Error())
-			} else {
-				return nil, err
-			}
-		} else {
-			body, err := io.ReadAll(metricsRes.Body)
-			if err != nil {
-				return nil, err
-			}
-			metricsLog = fmt.Sprintf("Error connecting to Azure Monitor endpoint: %s", string(body))
-		}
-	} else {
-		subscriptions, err := parseSubscriptions(metricsRes)
-		if err != nil {
-			return nil, err
-		}
-		if defaultSubscription == "" && len(subscriptions) > 0 {
-			defaultSubscription = subscriptions[0]
-		}
+	metricsLog, defaultSubscription, metricsStatus := metricCheckHealth(dsInfo)
+	if metricsStatus != backend.HealthStatusOk {
+		status = metricsStatus
 	}
 
-	logsRes, err := checkAzureLogAnalyticsHealth(dsInfo, defaultSubscription)
-	if err != nil || logsRes.StatusCode != 200 {
-		status = backend.HealthStatusError
-		if err != nil {
-			if err.Error() == "no default workspace found" {
-				status = backend.HealthStatusUnknown
-				logAnalyticsLog = "No Log Analytics workspaces found."
-			} else if ok := errors.Is(err, types.ErrorAzureHealthCheck); ok {
-				logAnalyticsLog = fmt.Sprintf("Error connecting to Azure Log Analytics endpoint: %s", err.Error())
-			} else {
-				return nil, err
-			}
-		} else {
-			body, err := io.ReadAll(logsRes.Body)
-			if err != nil {
-				return nil, err
-			}
-			logAnalyticsLog = fmt.Sprintf("Error connecting to Azure Log Analytics endpoint: %s", string(body))
-		}
+	logAnalyticsLog, logAnalyticsStatus := logAnalyticsCheckHealth(dsInfo, defaultSubscription)
+	if logAnalyticsStatus != backend.HealthStatusOk {
+		status = logAnalyticsStatus
 	}
 
-	resourceGraphRes, err := checkAzureMonitorResourceGraphHealth(dsInfo, defaultSubscription)
-	if err != nil || resourceGraphRes.StatusCode != 200 {
-		status = backend.HealthStatusError
-		if err != nil {
-			if ok := errors.Is(err, types.ErrorAzureHealthCheck); ok {
-				graphLog = fmt.Sprintf("Error connecting to Azure Resource Graph endpoint: %s", err.Error())
-			} else {
-				return nil, err
-			}
-		} else {
-			body, err := io.ReadAll(resourceGraphRes.Body)
-			if err != nil {
-				return nil, err
-			}
-			graphLog = fmt.Sprintf("Error connecting to Azure Resource Graph endpoint: %s", string(body))
-		}
+	graphLog, graphStatus := graphLogHealthCheck(dsInfo, defaultSubscription)
+	if graphStatus != backend.HealthStatusOk {
+		status = graphStatus
 	}
 
-	defer func() {
-		if metricsRes != nil {
-			if err := metricsRes.Body.Close(); err != nil {
-				backend.Logger.Error("Failed to close response body", "err", err)
-			}
-		}
-		if logsRes != nil {
-			if err := logsRes.Body.Close(); logsRes != nil && err != nil {
-				backend.Logger.Error("Failed to close response body", "err", err)
-			}
-		}
-		if resourceGraphRes != nil {
-			if err := resourceGraphRes.Body.Close(); resourceGraphRes != nil && err != nil {
-				backend.Logger.Error("Failed to close response body", "err", err)
-			}
-		}
-	}()
-
 	if status == backend.HealthStatusOk {
 		return &backend.CheckHealthResult{
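The three helpers added above (`metricCheckHealth`, `logAnalyticsCheckHealth`, `graphLogHealthCheck`) share one shape: run the probe, close the body in a deferred function that can downgrade the status, and turn a non-200 response into the user-facing message. The standalone sketch below restates that shared shape with a hypothetical `checkEndpoint` helper and a simplified status type; it is illustrative only and not part of this commit.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

type healthStatus int

const (
	healthOK healthStatus = iota
	healthError
)

// checkEndpoint mirrors the shape shared by the new health-check helpers:
// run the probe, close the body via defer (which may flip the status),
// and convert a non-200 response into an error message.
func checkEndpoint(name string, probe func() (*http.Response, error)) (message string, status healthStatus) {
	res, err := probe()
	if err != nil {
		return fmt.Sprintf("Error connecting to %s endpoint: %s", name, err), healthError
	}
	defer func() {
		if cerr := res.Body.Close(); cerr != nil {
			message += cerr.Error()
			status = healthError
		}
	}()
	if res.StatusCode != 200 {
		body, rerr := io.ReadAll(res.Body)
		if rerr != nil {
			return rerr.Error(), healthError
		}
		return fmt.Sprintf("Error connecting to %s endpoint: %s", name, string(body)), healthError
	}
	return fmt.Sprintf("Successfully connected to %s endpoint.", name), healthOK
}

func main() {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	}))
	defer srv.Close()

	msg, status := checkEndpoint("Azure Monitor", func() (*http.Response, error) {
		return srv.Client().Get(srv.URL)
	})
	fmt.Println(status, msg)
}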

View File

@@ -17,7 +17,6 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
 	"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
 
-	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/infra/tracing"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
@@ -132,10 +131,11 @@ type fakeExecutor struct {
 	expectedURL string
 }
 
-func (f *fakeExecutor) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
+func (f *fakeExecutor) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
+	return nil, nil
 }
 
-func (f *fakeExecutor) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
+func (f *fakeExecutor) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
 	if client == nil {
 		f.t.Errorf("The HTTP client for %s is missing", f.queryType)
 	} else {

View File

@@ -21,7 +21,6 @@ import (
 	"go.opentelemetry.io/otel/attribute"
 	"k8s.io/utils/strings/slices"
 
-	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/infra/tracing"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
@@ -51,24 +50,28 @@ type AzureLogAnalyticsQuery struct {
 	IntersectTime bool
 }
 
-func (e *AzureLogAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
-	e.Proxy.Do(rw, req, cli)
+func (e *AzureLogAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
+	return e.Proxy.Do(rw, req, cli)
 }
 
 // executeTimeSeriesQuery does the following:
 // 1. build the AzureMonitor url and querystring for each query
 // 2. executes each query by calling the Azure Monitor API
 // 3. parses the responses for each query into data frames
-func (e *AzureLogAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
+func (e *AzureLogAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
 	result := backend.NewQueryDataResponse()
-	ctxLogger := logger.FromContext(ctx)
-	queries, err := e.buildQueries(ctx, ctxLogger, originalQueries, dsInfo, tracer)
+	queries, err := e.buildQueries(ctx, originalQueries, dsInfo, tracer)
 	if err != nil {
 		return nil, err
 	}
 
 	for _, query := range queries {
-		result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
+		res, err := e.executeQuery(ctx, query, dsInfo, client, url, tracer)
+		if err != nil {
+			result.Responses[query.RefID] = backend.DataResponse{Error: err}
+			continue
+		}
+		result.Responses[query.RefID] = *res
 	}
 
 	return result, nil
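This loop is the pattern used by both the Log Analytics and the metrics executor after this change: `executeQuery` now returns `(*backend.DataResponse, error)`, and a failure is attached to the query's RefID instead of being logged, so the remaining queries still run. A minimal standalone sketch of that collection pattern, with stub types standing in for the SDK ones:

package main

import (
	"errors"
	"fmt"
)

type dataResponse struct {
	Err error
}

type queryDataResponse struct {
	Responses map[string]dataResponse
}

// runQueries mirrors the new loop: a failed query is stored under its RefID
// so the frontend can surface the error per panel, and the loop continues.
func runQueries(refIDs []string, exec func(refID string) (*dataResponse, error)) *queryDataResponse {
	result := &queryDataResponse{Responses: map[string]dataResponse{}}
	for _, refID := range refIDs {
		res, err := exec(refID)
		if err != nil {
			result.Responses[refID] = dataResponse{Err: err}
			continue
		}
		result.Responses[refID] = *res
	}
	return result
}

func main() {
	result := runQueries([]string{"A", "B"}, func(refID string) (*dataResponse, error) {
		if refID == "B" {
			return nil, errors.New("request failed")
		}
		return &dataResponse{}, nil
	})
	fmt.Printf("A error: %v, B error: %v\n", result.Responses["A"].Err, result.Responses["B"].Err)
}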
@@ -88,7 +91,7 @@ func getApiURL(resourceOrWorkspace string, isAppInsightsQuery bool) string {
 	}
 }
 
-func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo, tracer tracing.Tracer) ([]*AzureLogAnalyticsQuery, error) {
+func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, queries []backend.DataQuery, dsInfo types.DatasourceInfo, tracer tracing.Tracer) ([]*AzureLogAnalyticsQuery, error) {
 	azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}
 	appInsightsRegExp, err := regexp.Compile("providers/Microsoft.Insights/components")
 	if err != nil {
@@ -113,7 +116,6 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, logger l
		}
 
		azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics
-		logger.Debug("AzureLogAnalytics", "target", azureLogAnalyticsTarget)
 
		if azureLogAnalyticsTarget.ResultFormat != nil {
			resultFormat = *azureLogAnalyticsTarget.ResultFormat
@@ -154,7 +156,6 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, logger l
		}
 
		azureTracesTarget := queryJSONModel.AzureTraces
-		logger.Debug("AzureTraces", "target", azureTracesTarget)
 
		if azureTracesTarget.ResultFormat == nil {
			resultFormat = types.Table
@@ -180,7 +181,7 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, logger l
		operationId := ""
		if queryJSONModel.AzureTraces.OperationId != nil && *queryJSONModel.AzureTraces.OperationId != "" {
			operationId = *queryJSONModel.AzureTraces.OperationId
-			resourcesMap, err = getCorrelationWorkspaces(ctx, logger, resourceOrWorkspace, resourcesMap, dsInfo, operationId, tracer)
+			resourcesMap, err = getCorrelationWorkspaces(ctx, resourceOrWorkspace, resourcesMap, dsInfo, operationId, tracer)
			if err != nil {
				return nil, fmt.Errorf("failed to retrieve correlation resources for operation ID - %s: %s", operationId, err)
			}
@@ -204,15 +205,15 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, logger l
				traceParentExploreQuery = buildTracesQuery(operationId, &parentSpanIdVariable, queryJSONModel.AzureTraces.TraceTypes, queryJSONModel.AzureTraces.Filters, &resultFormat, queryResources)
				traceLogsExploreQuery = buildTracesLogsQuery(operationId, queryResources)
			}
-			traceExploreQuery, err = macros.KqlInterpolate(logger, query, dsInfo, traceExploreQuery, "TimeGenerated")
+			traceExploreQuery, err = macros.KqlInterpolate(query, dsInfo, traceExploreQuery, "TimeGenerated")
			if err != nil {
				return nil, fmt.Errorf("failed to create traces explore query: %s", err)
			}
-			traceParentExploreQuery, err = macros.KqlInterpolate(logger, query, dsInfo, traceParentExploreQuery, "TimeGenerated")
+			traceParentExploreQuery, err = macros.KqlInterpolate(query, dsInfo, traceParentExploreQuery, "TimeGenerated")
			if err != nil {
				return nil, fmt.Errorf("failed to create parent span traces explore query: %s", err)
			}
-			traceLogsExploreQuery, err = macros.KqlInterpolate(logger, query, dsInfo, traceLogsExploreQuery, "TimeGenerated")
+			traceLogsExploreQuery, err = macros.KqlInterpolate(query, dsInfo, traceLogsExploreQuery, "TimeGenerated")
			if err != nil {
				return nil, fmt.Errorf("failed to create traces logs explore query: %s", err)
			}
@@ -222,7 +223,7 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, logger l
 
		apiURL := getApiURL(resourceOrWorkspace, appInsightsQuery)
 
-		rawQuery, err := macros.KqlInterpolate(logger, query, dsInfo, queryString, "TimeGenerated")
+		rawQuery, err := macros.KqlInterpolate(query, dsInfo, queryString, "TimeGenerated")
		if err != nil {
			return nil, err
		}
@@ -247,45 +248,27 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, logger l
 	return azureLogAnalyticsQueries, nil
 }
 
-func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, logger log.Logger, query *AzureLogAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client,
-	url string, tracer tracing.Tracer) backend.DataResponse {
-	dataResponse := backend.DataResponse{}
-
-	dataResponseErrorWithExecuted := func(err error) backend.DataResponse {
-		dataResponse.Error = err
-		dataResponse.Frames = data.Frames{
-			&data.Frame{
-				RefID: query.RefID,
-				Meta: &data.FrameMeta{
-					ExecutedQueryString: query.Query,
-				},
-			},
-		}
-		return dataResponse
-	}
-
+func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.DataResponse, error) {
 	// If azureLogAnalyticsSameAs is defined and set to false, return an error
 	if sameAs, ok := dsInfo.JSONData["azureLogAnalyticsSameAs"]; ok && !sameAs.(bool) {
-		return dataResponseErrorWithExecuted(fmt.Errorf("credentials for Log Analytics are no longer supported. Go to the data source configuration to update Azure Monitor credentials"))
+		return nil, fmt.Errorf("credentials for Log Analytics are no longer supported. Go to the data source configuration to update Azure Monitor credentials")
 	}
 
 	queryJSONModel := dataquery.AzureMonitorQuery{}
 	err := json.Unmarshal(query.JSON, &queryJSONModel)
 	if err != nil {
-		dataResponse.Error = err
-		return dataResponse
+		return nil, err
 	}
 
 	if query.QueryType == string(dataquery.AzureQueryTypeAzureTraces) {
		if dataquery.ResultFormat(query.ResultFormat) == (dataquery.ResultFormatTrace) && query.Query == "" {
-			return dataResponseErrorWithExecuted(fmt.Errorf("cannot visualise trace events using the trace visualiser"))
+			return nil, fmt.Errorf("cannot visualise trace events using the trace visualiser")
		}
 	}
 
-	req, err := e.createRequest(ctx, logger, url, query)
+	req, err := e.createRequest(ctx, url, query)
 	if err != nil {
-		dataResponse.Error = err
-		return dataResponse
+		return nil, err
 	}
 
 	ctx, span := tracer.Start(ctx, "azure log analytics query")
@@ -299,42 +282,39 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, logger l
 	tracer.Inject(ctx, req.Header, span)
 
-	logger.Debug("AzureLogAnalytics", "Request ApiURL", req.URL.String())
 	res, err := client.Do(req)
 	if err != nil {
-		return dataResponseErrorWithExecuted(err)
+		return nil, err
 	}
 
 	defer func() {
		err := res.Body.Close()
-		if err != nil {
-			logger.Warn("failed to close response body", "error", err)
-		}
+		backend.Logger.Error("Failed to close response body", "err", err)
 	}()
 
-	logResponse, err := e.unmarshalResponse(logger, res)
+	logResponse, err := e.unmarshalResponse(res)
 	if err != nil {
-		return dataResponseErrorWithExecuted(err)
+		return nil, err
 	}
 
 	t, err := logResponse.GetPrimaryResultTable()
 	if err != nil {
-		return dataResponseErrorWithExecuted(err)
+		return nil, err
 	}
 
 	frame, err := ResponseTableToFrame(t, query.RefID, query.Query, dataquery.AzureQueryType(query.QueryType), dataquery.ResultFormat(query.ResultFormat))
 	if err != nil {
-		return dataResponseErrorWithExecuted(err)
+		return nil, err
 	}
 
 	frame = appendErrorNotice(frame, logResponse.Error)
 	if frame == nil {
-		return dataResponse
+		dataResponse := backend.DataResponse{}
+		return &dataResponse, nil
 	}
 
 	azurePortalBaseUrl, err := GetAzurePortalUrl(dsInfo.Cloud)
 	if err != nil {
-		dataResponse.Error = err
-		return dataResponse
+		return nil, err
 	}
 
 	if query.QueryType == string(dataquery.AzureQueryTypeAzureTraces) && query.ResultFormat == string(dataquery.ResultFormatTrace) {
@@ -359,22 +339,19 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, logger l
 
 	queryUrl, err := getQueryUrl(query.Query, query.Resources, azurePortalBaseUrl, query.TimeRange)
 	if err != nil {
-		dataResponse.Error = err
-		return dataResponse
+		return nil, err
 	}
 
 	if query.QueryType == string(dataquery.AzureQueryTypeAzureTraces) {
		tracesUrl, err := getTracesQueryUrl(query.Resources, azurePortalBaseUrl)
		if err != nil {
-			dataResponse.Error = err
-			return dataResponse
+			return nil, err
		}
 
		queryJSONModel := dataquery.AzureMonitorQuery{}
		err = json.Unmarshal(query.JSON, &queryJSONModel)
		if err != nil {
-			dataResponse.Error = err
-			return dataResponse
+			return nil, err
		}
		traceIdVariable := "${__data.fields.traceID}"
		resultFormat := dataquery.ResultFormatTrace
@@ -434,9 +411,8 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, logger l
 	} else {
		AddConfigLinks(*frame, queryUrl, nil)
 	}
-
-	dataResponse.Frames = data.Frames{frame}
-	return dataResponse
+	dataResponse := backend.DataResponse{Frames: data.Frames{frame}}
+	return &dataResponse, nil
 }
 
 func appendErrorNotice(frame *data.Frame, err *AzureLogAnalyticsAPIError) *data.Frame {
@@ -450,7 +426,7 @@ func appendErrorNotice(frame *data.Frame, err *AzureLogAnalyticsAPIError) *data.
 	return frame
 }
 
-func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, logger log.Logger, queryURL string, query *AzureLogAnalyticsQuery) (*http.Request, error) {
+func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, queryURL string, query *AzureLogAnalyticsQuery) (*http.Request, error) {
 	body := map[string]interface{}{
		"query": query.Query,
 	}
@@ -475,7 +451,6 @@ func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, logger
 	req, err := http.NewRequestWithContext(ctx, http.MethodPost, queryURL, bytes.NewBuffer(jsonValue))
 	if err != nil {
-		logger.Debug("Failed to create request", "error", err)
		return nil, fmt.Errorf("%v: %w", "failed to create request", err)
 	}
 	req.URL.Path = "/"
@@ -552,14 +527,13 @@ func getTracesQueryUrl(resources []string, azurePortalUrl string) (string, error
 	return portalUrl, nil
 }
 
-func getCorrelationWorkspaces(ctx context.Context, logger log.Logger, baseResource string, resourcesMap map[string]bool, dsInfo types.DatasourceInfo, operationId string, tracer tracing.Tracer) (map[string]bool, error) {
+func getCorrelationWorkspaces(ctx context.Context, baseResource string, resourcesMap map[string]bool, dsInfo types.DatasourceInfo, operationId string, tracer tracing.Tracer) (map[string]bool, error) {
 	azMonService := dsInfo.Services["Azure Monitor"]
 	correlationUrl := azMonService.URL + fmt.Sprintf("%s/providers/microsoft.insights/transactions/%s", baseResource, operationId)
 
 	callCorrelationAPI := func(url string) (AzureCorrelationAPIResponse, error) {
		req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer([]byte{}))
		if err != nil {
-			logger.Debug("Failed to create request", "error", err)
			return AzureCorrelationAPIResponse{}, fmt.Errorf("%v: %w", "failed to create request", err)
		}
		req.URL.Path = url
@@ -578,7 +552,6 @@ func getCorrelationWorkspaces(ctx context.Context, logger log.Logger, baseResour
		tracer.Inject(ctx, req.Header, span)
 
-		logger.Debug("AzureLogAnalytics", "Traces Correlation ApiURL", req.URL.String())
		res, err := azMonService.HTTPClient.Do(req)
		if err != nil {
			return AzureCorrelationAPIResponse{}, err
@@ -587,15 +560,14 @@ func getCorrelationWorkspaces(ctx context.Context, logger log.Logger, baseResour
		if err != nil {
			return AzureCorrelationAPIResponse{}, err
		}
		defer func() {
-			err := res.Body.Close()
-			if err != nil {
-				logger.Warn("failed to close response body", "error", err)
+			if err := res.Body.Close(); err != nil {
+				backend.Logger.Error("Failed to close response body", "err", err)
			}
		}()
		if res.StatusCode/100 != 2 {
-			logger.Debug("Request failed", "status", res.Status, "body", string(body))
			return AzureCorrelationAPIResponse{}, fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body))
		}
		var data AzureCorrelationAPIResponse
@@ -603,7 +575,6 @@ func getCorrelationWorkspaces(ctx context.Context, logger log.Logger, baseResour
		d.UseNumber()
		err = d.Decode(&data)
		if err != nil {
-			logger.Debug("Failed to unmarshal Azure Traces correlation API response", "error", err, "status", res.Status, "body", string(body))
			return AzureCorrelationAPIResponse{}, err
		}
@@ -689,19 +660,17 @@ func (ar *AzureLogAnalyticsResponse) GetPrimaryResultTable() (*types.AzureRespon
 	return nil, fmt.Errorf("no data as PrimaryResult table is missing from the response")
 }
 
-func (e *AzureLogAnalyticsDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (AzureLogAnalyticsResponse, error) {
+func (e *AzureLogAnalyticsDatasource) unmarshalResponse(res *http.Response) (AzureLogAnalyticsResponse, error) {
 	body, err := io.ReadAll(res.Body)
 	if err != nil {
		return AzureLogAnalyticsResponse{}, err
 	}
 	defer func() {
-		if err := res.Body.Close(); err != nil {
-			logger.Warn("Failed to close response body", "err", err)
-		}
+		err := res.Body.Close()
+		backend.Logger.Error("Failed to close response body", "err", err)
 	}()
 
 	if res.StatusCode/100 != 2 {
-		logger.Debug("Request failed", "status", res.Status, "body", string(body))
		return AzureLogAnalyticsResponse{}, fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body))
 	}
@@ -710,7 +679,6 @@ func (e *AzureLogAnalyticsDatasource) unmarshalResponse(logger log.Logger, res *
 	d.UseNumber()
 	err = d.Decode(&data)
 	if err != nil {
-		logger.Debug("Failed to unmarshal Azure Log Analytics response", "error", err, "status", res.Status, "body", string(body))
		return AzureLogAnalyticsResponse{}, err
 	}

View File

@@ -16,14 +16,11 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
 	"github.com/stretchr/testify/require"
 
-	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/infra/tracing"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
 )
 
-var logger = log.New("test")
-
 func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
 	datasource := &AzureLogAnalyticsDatasource{}
 	fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
@@ -1396,7 +1393,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
 	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
-			queries, err := datasource.buildQueries(ctx, logger, tt.queryModel, dsInfo, tracer)
+			queries, err := datasource.buildQueries(ctx, tt.queryModel, dsInfo, tracer)
			tt.Err(t, err)
			if diff := cmp.Diff(tt.azureLogAnalyticsQueries[0], queries[0]); diff != "" {
				t.Errorf("Result mismatch (-want +got): \n%s", diff)
@@ -1411,7 +1408,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) {
 	t.Run("creates a request", func(t *testing.T) {
		ds := AzureLogAnalyticsDatasource{}
-		req, err := ds.createRequest(ctx, logger, url, &AzureLogAnalyticsQuery{
+		req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{
			Resources:     []string{"r"},
			Query:         "Perf",
			IntersectTime: false,
@@ -1435,7 +1432,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) {
 	t.Run("creates a request with timespan", func(t *testing.T) {
		ds := AzureLogAnalyticsDatasource{}
-		req, err := ds.createRequest(ctx, logger, url, &AzureLogAnalyticsQuery{
+		req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{
			Resources:     []string{"r"},
			Query:         "Perf",
			IntersectTime: true,
@@ -1459,7 +1456,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) {
 	t.Run("creates a request with multiple resources", func(t *testing.T) {
		ds := AzureLogAnalyticsDatasource{}
-		req, err := ds.createRequest(ctx, logger, url, &AzureLogAnalyticsQuery{
+		req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{
			Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.OperationalInsights/workspaces/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.OperationalInsights/workspaces/r2"},
			Query:     "Perf",
			QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics),
@@ -1479,7 +1476,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) {
		ds := AzureLogAnalyticsDatasource{}
		from := time.Now()
		to := from.Add(3 * time.Hour)
-		req, err := ds.createRequest(ctx, logger, url, &AzureLogAnalyticsQuery{
+		req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{
			Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.OperationalInsights/workspaces/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.OperationalInsights/workspaces/r2"},
			Query:     "Perf",
			QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics),
@@ -1503,7 +1500,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) {
		ds := AzureLogAnalyticsDatasource{}
		from := time.Now()
		to := from.Add(3 * time.Hour)
-		req, err := ds.createRequest(ctx, logger, url, &AzureLogAnalyticsQuery{
+		req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{
			Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.Insights/components/r2"},
			QueryType: string(dataquery.AzureQueryTypeAzureTraces),
			TimeRange: backend.TimeRange{
@@ -1538,11 +1535,8 @@ func Test_executeQueryErrorWithDifferentLogAnalyticsCreds(t *testing.T) {
		TimeRange: backend.TimeRange{},
 	}
 	tracer := tracing.InitializeTracerForTest()
-	res := ds.executeQuery(ctx, logger, query, dsInfo, &http.Client{}, dsInfo.Services["Azure Log Analytics"].URL, tracer)
-	if res.Error == nil {
-		t.Fatal("expecting an error")
-	}
-	if !strings.Contains(res.Error.Error(), "credentials for Log Analytics are no longer supported") {
+	_, err := ds.executeQuery(ctx, query, dsInfo, &http.Client{}, dsInfo.Services["Azure Log Analytics"].URL, tracer)
+	if !strings.Contains(err.Error(), "credentials for Log Analytics are no longer supported") {
		t.Error("expecting the error to inform of bad credentials")
 	}
 }

View File

@@ -9,7 +9,6 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 
-	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
 	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
@@ -34,17 +33,17 @@ type kqlMacroEngine struct {
 // - $__escapeMulti('\\vm\eth0\Total','\\vm\eth2\Total') -> @'\\vm\eth0\Total',@'\\vm\eth2\Total'
 // KqlInterpolate interpolates macros for Kusto Query Language (KQL) queries
-func KqlInterpolate(logger log.Logger, query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField ...string) (string, error) {
+func KqlInterpolate(query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField ...string) (string, error) {
 	engine := kqlMacroEngine{}
 
 	defaultTimeFieldForAllDatasources := "timestamp"
 	if len(defaultTimeField) > 0 && query.QueryType != string(dataquery.AzureQueryTypeAzureTraces) {
		defaultTimeFieldForAllDatasources = defaultTimeField[0]
 	}
-	return engine.Interpolate(logger, query, dsInfo, kql, defaultTimeFieldForAllDatasources)
+	return engine.Interpolate(query, dsInfo, kql, defaultTimeFieldForAllDatasources)
 }
 
-func (m *kqlMacroEngine) Interpolate(logger log.Logger, query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField string) (string, error) {
+func (m *kqlMacroEngine) Interpolate(query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField string) (string, error) {
 	m.timeRange = query.TimeRange
 	m.query = query
 	rExp, _ := regexp.Compile(sExpr)
@@ -74,7 +73,7 @@ func (m *kqlMacroEngine) Interpolate(logger log.Logger, query backend.DataQuery,
		for i, arg := range args {
			args[i] = strings.Trim(arg, " ")
		}
-		res, err := m.evaluateMacro(logger, groups[1], defaultTimeField, args, dsInfo)
+		res, err := m.evaluateMacro(groups[1], defaultTimeField, args, dsInfo)
		if err != nil && macroError == nil {
			macroError = err
			return "macro_error()"
@@ -94,7 +93,7 @@ type interval struct {
 	Interval string
 }
 
-func (m *kqlMacroEngine) evaluateMacro(logger log.Logger, name string, defaultTimeField string, args []string, dsInfo types.DatasourceInfo) (string, error) {
+func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, args []string, dsInfo types.DatasourceInfo) (string, error) {
 	switch name {
 	case "timeFilter":
		timeColumn := defaultTimeField
@@ -118,7 +117,6 @@ func (m *kqlMacroEngine) evaluateMacro(logger log.Logger, name string, defaultTi
		var queryInterval interval
		err := json.Unmarshal(m.query.JSON, &queryInterval)
		if err != nil {
-			logger.Warn("Unable to parse model from query", "JSON", m.query.JSON)
			it = defaultInterval
		} else {
			var (
@@ -130,7 +128,6 @@ func (m *kqlMacroEngine) evaluateMacro(logger log.Logger, name string, defaultTi
			}
			it, err = intervalv2.GetIntervalFrom(dsInterval, queryInterval.Interval, queryInterval.IntervalMs, defaultInterval)
			if err != nil {
-				logger.Warn("Unable to get interval from query", "model", queryInterval)
				it = defaultInterval
			}
		}
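With the logger parameter gone, `KqlInterpolate` is called with just the data query, the datasource info, the raw KQL, and an optional default time field, exactly as the updated macros test does. A small illustrative wrapper is sketched below; the `example` package and `interpolateExample` function are hypothetical and only show the call shape.

package example

import (
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"

	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)

// interpolateExample shows the post-change call shape: no logger argument,
// just the query, datasource info, raw KQL, and an optional default time field.
func interpolateExample() (string, error) {
	query := backend.DataQuery{
		JSON: []byte("{}"),
		TimeRange: backend.TimeRange{
			From: time.Now().Add(-time.Hour),
			To:   time.Now(),
		},
	}
	kql := "Perf | where $__timeFilter(TimeGenerated)"
	return macros.KqlInterpolate(query, types.DatasourceInfo{}, kql, "TimeGenerated")
}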

View File

@@ -9,7 +9,6 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/stretchr/testify/require"
 
-	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
 )
 
@@ -138,7 +137,7 @@ func TestAzureLogAnalyticsMacros(t *testing.T) {
 	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			defaultTimeField := "TimeGenerated"
-			rawQuery, err := KqlInterpolate(log.New("test"), tt.query, types.DatasourceInfo{}, tt.kql, defaultTimeField)
+			rawQuery, err := KqlInterpolate(tt.query, types.DatasourceInfo{}, tt.kql, defaultTimeField)
			tt.Err(t, err)
			if diff := cmp.Diff(tt.expected, rawQuery, cmpopts.EquateNaNs()); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)

View File

@ -17,10 +17,8 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery" "github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics" "github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time" azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
@ -41,31 +39,35 @@ var (
const AzureMonitorAPIVersion = "2021-05-01" const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) { func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
e.Proxy.Do(rw, req, cli) return e.Proxy.Do(rw, req, cli)
} }
// executeTimeSeriesQuery does the following: // executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query // 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API // 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames // 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse() result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo) queries, err := e.buildQueries(originalQueries, dsInfo)
if err != nil { if err != nil {
return nil, err return nil, err
} }
for _, query := range queries { for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer) res, err := e.executeQuery(ctx, query, dsInfo, client, url, tracer)
if err != nil {
result.Responses[query.RefID] = backend.DataResponse{Error: err}
continue
}
result.Responses[query.RefID] = *res
} }
return result, nil return result, nil
} }
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) { func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{} azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries { for _, query := range queries {
@ -173,10 +175,6 @@ func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backe
} }
target = params.Encode() target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
sub := "" sub := ""
if queryJSONModel.Subscription != nil { if queryJSONModel.Subscription != nil {
sub = *queryJSONModel.Subscription sub = *queryJSONModel.Subscription
@ -245,8 +243,8 @@ func getParams(azJSONModel *dataquery.AzureMetricQuery, query backend.DataQuery)
return params, nil return params, nil
} }
func (e *AzureMonitorDatasource) retrieveSubscriptionDetails(cli *http.Client, ctx context.Context, logger log.Logger, tracer tracing.Tracer, subscriptionId string, baseUrl string, dsId int64, orgId int64) (string, error) { func (e *AzureMonitorDatasource) retrieveSubscriptionDetails(cli *http.Client, ctx context.Context, tracer tracing.Tracer, subscriptionId string, baseUrl string, dsId int64, orgId int64) (string, error) {
req, err := e.createRequest(ctx, logger, fmt.Sprintf("%s/subscriptions/%s", baseUrl, subscriptionId)) req, err := e.createRequest(ctx, fmt.Sprintf("%s/subscriptions/%s", baseUrl, subscriptionId))
if err != nil { if err != nil {
return "", fmt.Errorf("failed to retrieve subscription details for subscription %s: %s", subscriptionId, err) return "", fmt.Errorf("failed to retrieve subscription details for subscription %s: %s", subscriptionId, err)
} }
@ -261,15 +259,14 @@ func (e *AzureMonitorDatasource) retrieveSubscriptionDetails(cli *http.Client, c
defer span.End() defer span.End()
tracer.Inject(ctx, req.Header, span) tracer.Inject(ctx, req.Header, span)
logger.Debug("AzureMonitor", "Subscription Details Req")
res, err := cli.Do(req) res, err := cli.Do(req)
if err != nil { if err != nil {
return "", fmt.Errorf("failed to request subscription details: %s", err) return "", fmt.Errorf("failed to request subscription details: %s", err)
} }
defer func() { defer func() {
if err := res.Body.Close(); err != nil { err := res.Body.Close()
logger.Warn("failed to close response body", "err", err) backend.Logger.Error("Failed to close response body", "err", err)
}
}() }()
body, err := io.ReadAll(res.Body) body, err := io.ReadAll(res.Body)
@ -290,14 +287,11 @@ func (e *AzureMonitorDatasource) retrieveSubscriptionDetails(cli *http.Client, c
return data.DisplayName, nil return data.DisplayName, nil
} }
-func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
-  url string, tracer tracing.Tracer) backend.DataResponse {
-  dataResponse := backend.DataResponse{}
-
-  req, err := e.createRequest(ctx, logger, url)
+func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
+  url string, tracer tracing.Tracer) (*backend.DataResponse, error) {
+  req, err := e.createRequest(ctx, url)
  if err != nil {
-    dataResponse.Error = err
-    return dataResponse
+    return nil, err
  }
  req.URL.Path = path.Join(req.URL.Path, query.URL)
@@ -317,49 +311,43 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Lo
  defer span.End()
  tracer.Inject(ctx, req.Header, span)

-  logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
-  logger.Debug("AzureMonitor", "Target", query.Target)
  res, err := cli.Do(req)
  if err != nil {
-    dataResponse.Error = err
-    return dataResponse
+    return nil, err
  }
  defer func() {
-    if err := res.Body.Close(); err != nil {
-      logger.Warn("failed to close response body", "err", err)
-    }
+    err := res.Body.Close()
+    backend.Logger.Error("Failed to close response body", "err", err)
  }()

-  data, err := e.unmarshalResponse(logger, res)
+  data, err := e.unmarshalResponse(res)
  if err != nil {
-    dataResponse.Error = err
-    return dataResponse
+    return nil, err
  }

  azurePortalUrl, err := loganalytics.GetAzurePortalUrl(dsInfo.Cloud)
  if err != nil {
-    dataResponse.Error = err
-    return dataResponse
+    return nil, err
  }

-  subscription, err := e.retrieveSubscriptionDetails(cli, ctx, logger, tracer, query.Subscription, dsInfo.Routes["Azure Monitor"].URL, dsInfo.DatasourceID, dsInfo.OrgID)
+  subscription, err := e.retrieveSubscriptionDetails(cli, ctx, tracer, query.Subscription, dsInfo.Routes["Azure Monitor"].URL, dsInfo.DatasourceID, dsInfo.OrgID)
  if err != nil {
-    logger.Warn(err.Error())
+    return nil, err
  }

-  dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl, subscription)
+  frames, err := e.parseResponse(data, query, azurePortalUrl, subscription)
  if err != nil {
-    dataResponse.Error = err
-    return dataResponse
+    return nil, err
  }

-  return dataResponse
+  dataResponse := backend.DataResponse{Frames: frames}
+  return &dataResponse, nil
 }
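A behavioural consequence of the rewrite above: a failed subscription-details lookup used to be logged with logger.Warn and otherwise ignored, whereas it now aborts the whole query. If the softer behaviour were ever wanted back, one hedged option, a sketch only and not part of this commit, is to fall back to the raw subscription ID when the display-name lookup fails:

package sketch

// displayNameOrID returns the subscription display name when the lookup
// succeeds and falls back to the subscription ID otherwise. lookup is a
// stand-in for a call like retrieveSubscriptionDetails.
func displayNameOrID(lookup func(id string) (string, error), subscriptionID string) string {
	name, err := lookup(subscriptionID)
	if err != nil {
		return subscriptionID
	}
	return name
}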
-func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
+func (e *AzureMonitorDatasource) createRequest(ctx context.Context, url string) (*http.Request, error) {
  req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
  if err != nil {
-    logger.Debug("failed to create request", "error", err)
    return nil, fmt.Errorf("%v: %w", "failed to create request", err)
  }
  req.Header.Set("Content-Type", "application/json")
@@ -367,21 +355,19 @@ func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.L
  return req, nil
 }

-func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
+func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (types.AzureMonitorResponse, error) {
  body, err := io.ReadAll(res.Body)
  if err != nil {
    return types.AzureMonitorResponse{}, err
  }

  if res.StatusCode/100 != 2 {
-    logger.Debug("Request failed", "status", res.Status, "body", string(body))
    return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s, error: %s", res.Status, string(body))
  }

  var data types.AzureMonitorResponse
  err = json.Unmarshal(body, &data)
  if err != nil {
-    logger.Debug("failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
    return types.AzureMonitorResponse{}, err
  }

View File

@@ -19,7 +19,6 @@ import (
  "github.com/stretchr/testify/assert"
  "github.com/stretchr/testify/require"

-  "github.com/grafana/grafana/pkg/infra/log"
  "github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
  "github.com/grafana/grafana/pkg/tsdb/azuremonitor/testdata"
  azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
@@ -294,7 +293,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
      },
    }

-    queries, err := datasource.buildQueries(log.New("test"), tsdbQuery, dsInfo)
+    queries, err := datasource.buildQueries(tsdbQuery, dsInfo)
    require.NoError(t, err)

    resources := map[string]dataquery.AzureMonitorResource{}
@@ -359,7 +358,7 @@ func TestCustomNamespace(t *testing.T) {
    },
  }

-  result, err := datasource.buildQueries(log.New("test"), q, types.DatasourceInfo{})
+  result, err := datasource.buildQueries(q, types.DatasourceInfo{})
  require.NoError(t, err)
  expected := "custom/namespace"
  require.Equal(t, expected, result[0].Params.Get("metricnamespace"))
@@ -690,7 +689,7 @@ func TestAzureMonitorCreateRequest(t *testing.T) {
  for _, tt := range tests {
    t.Run(tt.name, func(t *testing.T) {
      ds := AzureMonitorDatasource{}
-      req, err := ds.createRequest(ctx, log.New("test"), url)
+      req, err := ds.createRequest(ctx, url)
      tt.Err(t, err)
      if req.URL.String() != tt.expectedURL {
        t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())

View File

@@ -15,7 +15,6 @@ import (
  "github.com/grafana/grafana-plugin-sdk-go/data"
  "go.opentelemetry.io/otel/attribute"

-  "github.com/grafana/grafana/pkg/infra/log"
  "github.com/grafana/grafana/pkg/infra/tracing"
  "github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
  "github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
@@ -48,27 +47,31 @@ type AzureResourceGraphQuery struct {
 const ArgAPIVersion = "2021-06-01-preview"
 const argQueryProviderName = "/providers/Microsoft.ResourceGraph/resources"

-func (e *AzureResourceGraphDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
-  e.Proxy.Do(rw, req, cli)
+func (e *AzureResourceGraphDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
+  return e.Proxy.Do(rw, req, cli)
 }
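With ResourceRequest (and ServiceProxy.Do, see the interface change at the end of this diff) now returning an error, the resource-handler side can observe proxy failures instead of silently dropping them. A hedged sketch of such a caller, with a locally defined interface standing in for types.ServiceProxy and names that are illustrative only:

package sketch

import (
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

// serviceProxy mirrors the updated types.ServiceProxy signature.
type serviceProxy interface {
	Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error)
}

// handleResource forwards the request through the proxy and records any
// failure. The proxy may already have written an error status to rw, so the
// sketch logs rather than writing a second response.
func handleResource(proxy serviceProxy, cli *http.Client) http.HandlerFunc {
	return func(rw http.ResponseWriter, req *http.Request) {
		if _, err := proxy.Do(rw, req, cli); err != nil {
			backend.Logger.Error("Resource proxy request failed", "err", err)
		}
	}
}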
 // executeTimeSeriesQuery does the following:
 // 1. builds the AzureMonitor url and querystring for each query
 // 2. executes each query by calling the Azure Monitor API
 // 3. parses the responses for each query into data frames
-func (e *AzureResourceGraphDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
+func (e *AzureResourceGraphDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
  result := &backend.QueryDataResponse{
    Responses: map[string]backend.DataResponse{},
  }

-  ctxLogger := logger.FromContext(ctx)
-  queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
+  queries, err := e.buildQueries(originalQueries, dsInfo)
  if err != nil {
    return nil, err
  }

  for _, query := range queries {
-    result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
+    res, err := e.executeQuery(ctx, query, dsInfo, client, url, tracer)
+    if err != nil {
+      result.Responses[query.RefID] = backend.DataResponse{Error: err}
+      continue
+    }
+    result.Responses[query.RefID] = *res
  }

  return result, nil
@@ -81,7 +84,7 @@ type argJSONQuery struct {
  } `json:"azureResourceGraph"`
 }

-func (e *AzureResourceGraphDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureResourceGraphQuery, error) {
+func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureResourceGraphQuery, error) {
  var azureResourceGraphQueries []*AzureResourceGraphQuery

  for _, query := range queries {
@@ -92,14 +95,13 @@ func (e *AzureResourceGraphDatasource) buildQueries(logger log.Logger, queries [
    }

    azureResourceGraphTarget := queryJSONModel.AzureResourceGraph
-    logger.Debug("AzureResourceGraph", "target", azureResourceGraphTarget)

    resultFormat := azureResourceGraphTarget.ResultFormat
    if resultFormat == "" {
      resultFormat = "table"
    }

-    interpolatedQuery, err := macros.KqlInterpolate(logger, query, dsInfo, azureResourceGraphTarget.Query)
+    interpolatedQuery, err := macros.KqlInterpolate(query, dsInfo, azureResourceGraphTarget.Query)
    if err != nil {
      return nil, err
@@ -118,31 +120,15 @@ func (e *AzureResourceGraphDatasource) buildQueries(logger log.Logger, queries [
  return azureResourceGraphQueries, nil
 }

-func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, logger log.Logger, query *AzureResourceGraphQuery, dsInfo types.DatasourceInfo, client *http.Client,
-  dsURL string, tracer tracing.Tracer) backend.DataResponse {
-  dataResponse := backend.DataResponse{}
-
+func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo types.DatasourceInfo, client *http.Client,
+  dsURL string, tracer tracing.Tracer) (*backend.DataResponse, error) {
  params := url.Values{}
  params.Add("api-version", ArgAPIVersion)

-  dataResponseErrorWithExecuted := func(err error) backend.DataResponse {
-    dataResponse = backend.DataResponse{Error: err}
-    frames := data.Frames{
-      &data.Frame{
-        RefID: query.RefID,
-        Meta: &data.FrameMeta{
-          ExecutedQueryString: query.InterpolatedQuery,
-        },
-      },
-    }
-    dataResponse.Frames = frames
-    return dataResponse
-  }
-
  var model dataquery.AzureMonitorQuery
  err := json.Unmarshal(query.JSON, &model)
  if err != nil {
-    dataResponse.Error = err
-    return dataResponse
+    return nil, err
  }

  reqBody, err := json.Marshal(map[string]interface{}{
@@ -152,15 +138,13 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, logger
  })
  if err != nil {
-    dataResponse.Error = err
-    return dataResponse
+    return nil, err
  }

-  req, err := e.createRequest(ctx, logger, reqBody, dsURL)
+  req, err := e.createRequest(ctx, reqBody, dsURL)
  if err != nil {
-    dataResponse.Error = err
-    return dataResponse
+    return nil, err
  }

  req.URL.Path = path.Join(req.URL.Path, argQueryProviderName)
@@ -177,36 +161,34 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, logger
  tracer.Inject(ctx, req.Header, span)

-  logger.Debug("AzureResourceGraph", "Request ApiURL", req.URL.String())
  res, err := client.Do(req)
  if err != nil {
-    return dataResponseErrorWithExecuted(err)
+    return nil, err
  }

  defer func() {
    err := res.Body.Close()
-    if err != nil {
-      logger.Warn("failed to close response body", "error", err)
-    }
+    backend.Logger.Error("Failed to close response body", "err", err)
  }()

-  argResponse, err := e.unmarshalResponse(logger, res)
+  argResponse, err := e.unmarshalResponse(res)
  if err != nil {
-    return dataResponseErrorWithExecuted(err)
+    return nil, err
  }

  frame, err := loganalytics.ResponseTableToFrame(&argResponse.Data, query.RefID, query.InterpolatedQuery, dataquery.AzureQueryType(query.QueryType), dataquery.ResultFormat(query.ResultFormat))
  if err != nil {
-    return dataResponseErrorWithExecuted(err)
+    return nil, err
  }

  if frame == nil {
    // empty response
-    return dataResponse
+    dataResponse := backend.DataResponse{}
+    return &dataResponse, nil
  }

  azurePortalUrl, err := loganalytics.GetAzurePortalUrl(dsInfo.Cloud)
  if err != nil {
-    return dataResponseErrorWithExecuted(err)
+    return nil, err
  }

  url := azurePortalUrl + "/#blade/HubsExtension/ArgQueryBlade/query/" + url.PathEscape(query.InterpolatedQuery)
@@ -216,14 +198,14 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, logger
  }
  frameWithLink.Meta.ExecutedQueryString = req.URL.RawQuery

+  dataResponse := backend.DataResponse{}
  dataResponse.Frames = data.Frames{&frameWithLink}
-  return dataResponse
+  return &dataResponse, nil
 }
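The removed dataResponseErrorWithExecuted closure used to attach a frame carrying ExecutedQueryString to every error response; after this change, error responses carry only the error. If that metadata were still wanted on failures, a hedged, standalone sketch of an equivalent helper (not part of this commit) could look like:

package sketch

import (
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
)

// errorResponseWithExecuted builds a DataResponse that records the error and
// still exposes the query text that was executed, mirroring the behaviour of
// the removed closure.
func errorResponseWithExecuted(refID, interpolatedQuery string, err error) backend.DataResponse {
	return backend.DataResponse{
		Error: err,
		Frames: data.Frames{
			&data.Frame{
				RefID: refID,
				Meta:  &data.FrameMeta{ExecutedQueryString: interpolatedQuery},
			},
		},
	}
}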
-func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, logger log.Logger, reqBody []byte, url string) (*http.Request, error) {
+func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, reqBody []byte, url string) (*http.Request, error) {
  req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer(reqBody))
  if err != nil {
-    logger.Debug("Failed to create request", "error", err)
    return nil, fmt.Errorf("%v: %w", "failed to create request", err)
  }
  req.URL.Path = "/"
@@ -232,19 +214,18 @@ func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, logger
  return req, nil
 }

-func (e *AzureResourceGraphDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (AzureResourceGraphResponse, error) {
+func (e *AzureResourceGraphDatasource) unmarshalResponse(res *http.Response) (AzureResourceGraphResponse, error) {
  body, err := io.ReadAll(res.Body)
  if err != nil {
    return AzureResourceGraphResponse{}, err
  }

  defer func() {
-    if err := res.Body.Close(); err != nil {
-      logger.Warn("Failed to close response body", "err", err)
-    }
+    err := res.Body.Close()
+    backend.Logger.Error("Failed to close response body", "err", err)
  }()

  if res.StatusCode/100 != 2 {
-    logger.Debug("Request failed", "status", res.Status, "body", string(body))
    return AzureResourceGraphResponse{}, fmt.Errorf("%s. Azure Resource Graph error: %s", res.Status, string(body))
  }
@@ -253,7 +234,6 @@ func (e *AzureResourceGraphDatasource) unmarshalResponse(logger log.Logger, res
  d.UseNumber()
  err = d.Decode(&data)
  if err != nil {
-    logger.Debug("Failed to unmarshal azure resource graph response", "error", err, "status", res.Status, "body", string(body))
    return AzureResourceGraphResponse{}, err
  }

View File

@@ -17,13 +17,10 @@ import (
  "github.com/stretchr/testify/assert"
  "github.com/stretchr/testify/require"

-  "github.com/grafana/grafana/pkg/infra/log"
  "github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
  "github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
 )

-var logger = log.New("test")
-
 func TestBuildingAzureResourceGraphQueries(t *testing.T) {
  datasource := &AzureResourceGraphDatasource{}
  fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
@@ -74,7 +71,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
  for _, tt := range tests {
    t.Run(tt.name, func(t *testing.T) {
-      queries, err := datasource.buildQueries(logger, tt.queryModel, types.DatasourceInfo{})
+      queries, err := datasource.buildQueries(tt.queryModel, types.DatasourceInfo{})
      tt.Err(t, err)
      if diff := cmp.Diff(tt.azureResourceGraphQueries, queries, cmpopts.IgnoreUnexported(struct{}{})); diff != "" {
        t.Errorf("Result mismatch (-want +got):\n%s", diff)
@@ -106,7 +103,7 @@ func TestAzureResourceGraphCreateRequest(t *testing.T) {
  for _, tt := range tests {
    t.Run(tt.name, func(t *testing.T) {
      ds := AzureResourceGraphDatasource{}
-      req, err := ds.createRequest(ctx, logger, []byte{}, url)
+      req, err := ds.createRequest(ctx, []byte{}, url)
      tt.Err(t, err)
      if req.URL.String() != tt.expectedURL {
        t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
@@ -158,7 +155,7 @@ func TestGetAzurePortalUrl(t *testing.T) {
 func TestUnmarshalResponse400(t *testing.T) {
  datasource := &AzureResourceGraphDatasource{}

-  res, err := datasource.unmarshalResponse(logger, &http.Response{
+  res, err := datasource.unmarshalResponse(&http.Response{
    StatusCode: 400,
    Status:     "400 Bad Request",
    Body:       io.NopCloser(strings.NewReader(("Azure Error Message"))),
@@ -172,7 +169,7 @@ func TestUnmarshalResponse400(t *testing.T) {
 func TestUnmarshalResponse200Invalid(t *testing.T) {
  datasource := &AzureResourceGraphDatasource{}

-  res, err := datasource.unmarshalResponse(logger, &http.Response{
+  res, err := datasource.unmarshalResponse(&http.Response{
    StatusCode: 200,
    Status:     "OK",
    Body:       io.NopCloser(strings.NewReader(("Azure Data"))),
@@ -187,7 +184,7 @@ func TestUnmarshalResponse200Invalid(t *testing.T) {
 func TestUnmarshalResponse200(t *testing.T) {
  datasource := &AzureResourceGraphDatasource{}

-  res, err2 := datasource.unmarshalResponse(logger, &http.Response{
+  res, err2 := datasource.unmarshalResponse(&http.Response{
    StatusCode: 200,
    Status:     "OK",
    Body:       io.NopCloser(strings.NewReader("{}")),

View File

@@ -190,7 +190,7 @@ type MetricVisualization struct {
 }

 type ServiceProxy interface {
-  Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter
+  Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error)
 }

 type LogAnalyticsWorkspaceFeatures struct {
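Since ServiceProxy.Do now returns (http.ResponseWriter, error), any test double has to grow the extra return value. A hedged sketch of a minimal fake (names are illustrative, not from this commit):

package sketch

import "net/http"

// fakeProxy implements the updated ServiceProxy contract for tests: it can be
// primed with an error to exercise the failure path.
type fakeProxy struct {
	err error
}

func (f *fakeProxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
	if f.err != nil {
		return nil, f.err
	}
	rw.WriteHeader(http.StatusOK)
	return rw, nil
}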