import { cloneDeep, find, first as _first, isNumber, isObject, isString, map as _map } from 'lodash';
import { generate, lastValueFrom, Observable, of, throwError } from 'rxjs';
import { catchError, first, map, mergeMap, skipWhile, throwIfEmpty, tap } from 'rxjs/operators';

import {
  DataFrame,
  DataLink,
  DataQueryRequest,
  DataQueryResponse,
  DataSourceInstanceSettings,
  DataSourceWithLogsContextSupport,
  DataSourceWithQueryImportSupport,
  DataSourceWithSupplementaryQueriesSupport,
  DateTime,
  dateTime,
  Field,
  getDefaultTimeRange,
  AbstractQuery,
  LogLevel,
  LogRowModel,
  MetricFindValue,
  ScopedVars,
  TimeRange,
  toUtc,
  QueryFixAction,
  CoreApp,
  SupplementaryQueryType,
} from '@grafana/data';
import { BackendSrvRequest, DataSourceWithBackend, getBackendSrv, getDataSourceSrv, config } from '@grafana/runtime';
import { queryLogsVolume } from 'app/core/logsModel';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';

import { RowContextOptions } from '../../../features/logs/components/LogRowContextProvider';
import { getLogLevelFromKey } from '../../../features/logs/utils';

import { ElasticResponse } from './ElasticResponse';
import { IndexPattern } from './IndexPattern';
import LanguageProvider from './LanguageProvider';
import { ElasticQueryBuilder } from './QueryBuilder';
import { ElasticsearchAnnotationsQueryEditor } from './components/QueryEditor/AnnotationQueryEditor';
import { isBucketAggregationWithField } from './components/QueryEditor/BucketAggregationsEditor/aggregations';
import { bucketAggregationConfig } from './components/QueryEditor/BucketAggregationsEditor/utils';
import {
  isMetricAggregationWithField,
  isPipelineAggregationWithMultipleBucketPaths,
} from './components/QueryEditor/MetricAggregationsEditor/aggregations';
import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';
import { defaultBucketAgg, hasMetricOfType } from './queryDef';
import { trackQuery } from './tracking';
import { Logs, BucketAggregation, DataLinkConfig, ElasticsearchOptions, ElasticsearchQuery, TermsQuery } from './types';
import { coerceESVersion, getScriptValue, isSupportedVersion } from './utils';

export const REF_ID_STARTER_LOG_VOLUME = 'log-volume-';

// These are metadata fields as defined in https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-fields.html#_identity_metadata_fields.
// Custom fields can also start with underscores, so it is not safe to exclude everything that starts with one.
const ELASTIC_META_FIELDS = [
  '_index',
  '_type',
  '_id',
  '_source',
  '_size',
  '_field_names',
  '_ignored',
  '_routing',
  '_meta',
];

export class ElasticDatasource
  extends DataSourceWithBackend<ElasticsearchQuery, ElasticsearchOptions>
  implements
    DataSourceWithLogsContextSupport,
    DataSourceWithQueryImportSupport<ElasticsearchQuery>,
    DataSourceWithSupplementaryQueriesSupport<ElasticsearchQuery>
{
  basicAuth?: string;
  withCredentials?: boolean;
  url: string;
  name: string;
  index: string;
  timeField: string;
  esVersion: string;
  xpack: boolean;
  interval: string;
  maxConcurrentShardRequests?: number;
  queryBuilder: ElasticQueryBuilder;
  indexPattern: IndexPattern;
  logMessageField?: string;
  logLevelField?: string;
  dataLinks: DataLinkConfig[];
  languageProvider: LanguageProvider;
  includeFrozen: boolean;
  isProxyAccess: boolean;
  timeSrv: TimeSrv;

  constructor(
    instanceSettings: DataSourceInstanceSettings<ElasticsearchOptions>,
    private readonly templateSrv: TemplateSrv = getTemplateSrv()
  ) {
    super(instanceSettings);
    this.basicAuth = instanceSettings.basicAuth;
    this.withCredentials = instanceSettings.withCredentials;
    this.url = instanceSettings.url!;
    this.name = instanceSettings.name;
    this.index = instanceSettings.database ?? '';
    this.isProxyAccess = instanceSettings.access === 'proxy';

    const settingsData = instanceSettings.jsonData || ({} as ElasticsearchOptions);
    this.timeField = settingsData.timeField;
    this.esVersion = coerceESVersion(settingsData.esVersion);
    this.xpack = Boolean(settingsData.xpack);
    this.indexPattern = new IndexPattern(this.index, settingsData.interval);
    this.interval = settingsData.timeInterval;
    this.maxConcurrentShardRequests = settingsData.maxConcurrentShardRequests;
    this.queryBuilder = new ElasticQueryBuilder({
      timeField: this.timeField,
    });
    this.logMessageField = settingsData.logMessageField || '';
    this.logLevelField = settingsData.logLevelField || '';
    this.dataLinks = settingsData.dataLinks || [];
    this.includeFrozen = settingsData.includeFrozen ?? false;
    this.annotations = {
      QueryEditor: ElasticsearchAnnotationsQueryEditor,
    };

    if (this.logMessageField === '') {
      this.logMessageField = undefined;
    }
    if (this.logLevelField === '') {
      this.logLevelField = undefined;
    }
    this.languageProvider = new LanguageProvider(this);
    this.timeSrv = getTimeSrv();
  }

  private request(
    method: string,
    url: string,
    data?: undefined,
    headers?: BackendSrvRequest['headers']
  ): Observable<any> {
    if (!this.isProxyAccess) {
      const error = new Error(
        'Browser access mode in the Elasticsearch datasource is no longer available. Switch to server access mode.'
      );
      return throwError(() => error);
    }

    if (!isSupportedVersion(this.esVersion)) {
      const error = new Error(
        'Support for Elasticsearch versions after their end-of-life (currently versions < 7.10) was removed.'
      );
      return throwError(() => error);
    }

    const options: BackendSrvRequest = {
      url: this.url + '/' + url,
      method,
      data,
      headers,
    };

    if (this.basicAuth || this.withCredentials) {
      options.withCredentials = true;
    }
    if (this.basicAuth) {
      options.headers = {
        Authorization: this.basicAuth,
      };
    }

    return getBackendSrv()
      .fetch<any>(options)
      .pipe(
        map((results) => {
          results.data.$$config = results.config;
          return results.data;
        }),
        catchError((err) => {
          if (err.data) {
            const message = err.data.error?.reason ?? err.data.message ?? 'Unknown error';

            return throwError({
              message: 'Elasticsearch error: ' + message,
              error: err.data.error,
            });
          }

          return throwError(err);
        })
      );
  }

  async importFromAbstractQueries(abstractQueries: AbstractQuery[]): Promise<ElasticsearchQuery[]> {
    return abstractQueries.map((abstractQuery) => this.languageProvider.importFromAbstractQuery(abstractQuery));
  }

  /**
   * Sends a GET request to the specified url on the newest matching and available index.
   *
   * When multiple indices span the provided time range, the request is sent starting from the newest index,
   * and then going backwards until an index is found.
   *
   * @param url the url to query the index on, for example `/_mapping`.
   */
  private get(url: string, range = getDefaultTimeRange()): Observable<any> {
    let indexList = this.indexPattern.getIndexList(range.from, range.to);
    if (!Array.isArray(indexList)) {
      indexList = [this.indexPattern.getIndexForToday()];
    }

    const indexUrlList = indexList.map((index) => index + url);

    return this.requestAllIndices(indexUrlList);
  }

  private requestAllIndices(indexList: string[]): Observable<any> {
    const maxTraversals = 7; // do not go beyond one week (for a daily pattern)
    const listLen = indexList.length;

    return generate({
      initialState: 0,
      condition: (i) => i < Math.min(listLen, maxTraversals),
      iterate: (i) => i + 1,
    }).pipe(
      mergeMap((index) => {
        // catch all errors and emit an object with an err property to simplify checks later in the pipeline
        return this.request('GET', indexList[listLen - index - 1]).pipe(catchError((err) => of({ err })));
      }),
      skipWhile((resp) => resp?.err?.status === 404), // skip all requests that fail because of a missing Elastic index
      throwIfEmpty(() => 'Could not find an available index for this time range.'), // when i === Math.min(listLen, maxTraversals), generate completes without emitting any values, which means we didn't find a valid index
      first(), // take the first value that isn't skipped
      map((resp) => {
        if (resp.err) {
          throw resp.err; // if there is some error other than a 404 we must throw it
        }

        return resp;
      })
    );
  }
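
  // Illustrative sketch (comment only, assumed index names): with a daily index pattern such as
  // "[logstash-]YYYY.MM.DD" and a three-day time range, get('/_mapping') builds a request list like
  //   ['logstash-2023.03.20/_mapping', 'logstash-2023.03.21/_mapping', 'logstash-2023.03.22/_mapping']
  // and requestAllIndices() probes it from the newest entry backwards, skipping 404s until one index responds.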

  private post(url: string, data: any): Observable<any> {
    return this.request('POST', url, data, { 'Content-Type': 'application/x-ndjson' });
  }

  annotationQuery(options: any): Promise<any> {
    const annotation = options.annotation;
    const timeField = annotation.timeField || '@timestamp';
    const timeEndField = annotation.timeEndField || null;
    const queryString = annotation.query;
    const tagsField = annotation.tagsField || 'tags';
    const textField = annotation.textField || null;

    const dateRanges = [];
    const rangeStart: any = {};
    rangeStart[timeField] = {
      from: options.range.from.valueOf(),
      to: options.range.to.valueOf(),
      format: 'epoch_millis',
    };
    dateRanges.push({ range: rangeStart });

    if (timeEndField) {
      const rangeEnd: any = {};
      rangeEnd[timeEndField] = {
        from: options.range.from.valueOf(),
        to: options.range.to.valueOf(),
        format: 'epoch_millis',
      };
      dateRanges.push({ range: rangeEnd });
    }

    const queryInterpolated = this.interpolateLuceneQuery(queryString);
    const query: any = {
      bool: {
        filter: [
          {
            bool: {
              should: dateRanges,
              minimum_should_match: 1,
            },
          },
        ],
      },
    };

    if (queryInterpolated) {
      query.bool.filter.push({
        query_string: {
          query: queryInterpolated,
        },
      });
    }

    const data: any = {
      query,
      size: 10000,
    };

    const header: any = {
      search_type: 'query_then_fetch',
      ignore_unavailable: true,
    };

    // old elastic annotations had index specified on them
    if (annotation.index) {
      header.index = annotation.index;
    } else {
      header.index = this.indexPattern.getIndexList(options.range.from, options.range.to);
    }

    const payload = JSON.stringify(header) + '\n' + JSON.stringify(data) + '\n';
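
    // Illustrative sketch (comment only, assumed index value): the resulting _msearch payload is two NDJSON lines, e.g.
    //   {"search_type":"query_then_fetch","ignore_unavailable":true,"index":"logstash-2023.03.22"}
    //   {"query":{"bool":{"filter":[...]}},"size":10000}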

    return lastValueFrom(
      this.post('_msearch', payload).pipe(
        map((res) => {
          const list = [];
          const hits = res.responses[0].hits.hits;

          const getFieldFromSource = (source: any, fieldName: any) => {
            if (!fieldName) {
              return;
            }

            const fieldNames = fieldName.split('.');
            let fieldValue = source;

            for (let i = 0; i < fieldNames.length; i++) {
              fieldValue = fieldValue[fieldNames[i]];
              if (!fieldValue) {
                console.log('could not find field in annotation: ', fieldName);
                return '';
              }
            }

            return fieldValue;
          };

          for (let i = 0; i < hits.length; i++) {
            const source = hits[i]._source;
            let time = getFieldFromSource(source, timeField);
            if (typeof hits[i].fields !== 'undefined') {
              const fields = hits[i].fields;
              if (isString(fields[timeField]) || isNumber(fields[timeField])) {
                time = fields[timeField];
              }
            }

            const event: {
              annotation: any;
              time: number;
              timeEnd?: number;
              text: string;
              tags: string | string[];
            } = {
              annotation: annotation,
              time: toUtc(time).valueOf(),
              text: getFieldFromSource(source, textField),
              tags: getFieldFromSource(source, tagsField),
            };

            if (timeEndField) {
              const timeEnd = getFieldFromSource(source, timeEndField);
              if (timeEnd) {
                event.timeEnd = toUtc(timeEnd).valueOf();
              }
            }

            // legacy support for the title field
            if (annotation.titleField) {
              const title = getFieldFromSource(source, annotation.titleField);
              if (title) {
                event.text = title + '\n' + event.text;
              }
            }

            if (typeof event.tags === 'string') {
              event.tags = event.tags.split(',');
            }

            list.push(event);
          }
          return list;
        })
      )
    );
  }

  private interpolateLuceneQuery(queryString: string, scopedVars?: ScopedVars) {
    return this.templateSrv.replace(queryString, scopedVars, 'lucene');
  }

  interpolateVariablesInQueries(queries: ElasticsearchQuery[], scopedVars: ScopedVars | {}): ElasticsearchQuery[] {
    return queries.map((q) => this.applyTemplateVariables(q, scopedVars));
  }

  testDatasource() {
    // validate that the index exists and has a date field
    return lastValueFrom(
      this.getFields(['date']).pipe(
        mergeMap((dateFields) => {
          const timeField: any = find(dateFields, { text: this.timeField });
          if (!timeField) {
            return of({ status: 'error', message: 'No date field named ' + this.timeField + ' found' });
          }
          return of({ status: 'success', message: 'Index OK. Time field name OK.' });
        }),
        catchError((err) => {
          console.error(err);
          if (err.message) {
            return of({ status: 'error', message: err.message });
          } else {
            return of({ status: 'error', message: err.status });
          }
        })
      )
    );
  }

  getQueryHeader(searchType: any, timeFrom?: DateTime, timeTo?: DateTime): string {
    const queryHeader: any = {
      search_type: searchType,
      ignore_unavailable: true,
      index: this.indexPattern.getIndexList(timeFrom, timeTo),
    };

    return JSON.stringify(queryHeader);
  }

  getQueryDisplayText(query: ElasticsearchQuery) {
    // TODO: This might be refactored a bit.
    const metricAggs = query.metrics;
    const bucketAggs = query.bucketAggs;
    let text = '';

    if (query.query) {
      text += 'Query: ' + query.query + ', ';
    }

    text += 'Metrics: ';

    text += metricAggs?.reduce((acc, metric) => {
      const metricConfig = metricAggregationConfig[metric.type];

      let text = metricConfig.label + '(';

      if (isMetricAggregationWithField(metric)) {
        text += metric.field;
      }
      if (isPipelineAggregationWithMultipleBucketPaths(metric)) {
        text += getScriptValue(metric).replace(new RegExp('params.', 'g'), '');
      }
      text += '), ';

      return `${acc} ${text}`;
    }, '');

    text += bucketAggs?.reduce((acc, bucketAgg, index) => {
      const bucketConfig = bucketAggregationConfig[bucketAgg.type];

      let text = '';
      if (index === 0) {
        text += ' Group by: ';
      }

      text += bucketConfig.label + '(';
      if (isBucketAggregationWithField(bucketAgg)) {
        text += bucketAgg.field;
      }

      return `${acc} ${text}), `;
    }, '');

    if (query.alias) {
      text += 'Alias: ' + query.alias;
    }

    return text;
  }
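
  // Illustrative sketch (comment only): for a query with a single Count metric and a Date Histogram bucket
  // aggregation on @timestamp, getQueryDisplayText() yields roughly
  //   'Metrics:  Count(),  Group by: Date Histogram(@timestamp), Alias: my-alias'
  // where the exact labels come from metricAggregationConfig / bucketAggregationConfig.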

  showContextToggle(): boolean {
    return true;
  }

  getLogRowContext = async (row: LogRowModel, options?: RowContextOptions): Promise<{ data: DataFrame[] }> => {
    const sortField = row.dataFrame.fields.find((f) => f.name === 'sort');
    const searchAfter = sortField?.values.get(row.rowIndex) || [row.timeEpochMs];
    const sort = options?.direction === 'FORWARD' ? 'asc' : 'desc';

    const header =
      options?.direction === 'FORWARD'
        ? this.getQueryHeader('query_then_fetch', dateTime(row.timeEpochMs))
        : this.getQueryHeader('query_then_fetch', undefined, dateTime(row.timeEpochMs));

    const limit = options?.limit ?? 10;

    const esQuery = JSON.stringify({
      size: limit,
      query: {
        bool: {
          filter: [
            {
              range: {
                [this.timeField]: {
                  [options?.direction === 'FORWARD' ? 'gte' : 'lte']: row.timeEpochMs,
                  format: 'epoch_millis',
                },
              },
            },
          ],
        },
      },
      sort: [{ [this.timeField]: sort }, { _doc: sort }],
      search_after: searchAfter,
    });

    const payload = [header, esQuery].join('\n') + '\n';
    const url = this.getMultiSearchUrl();
    const response = await lastValueFrom(this.post(url, payload));
    const targets: ElasticsearchQuery[] = [{ refId: `${row.dataFrame.refId}`, metrics: [{ type: 'logs', id: '1' }] }];
    const elasticResponse = new ElasticResponse(targets, transformHitsBasedOnDirection(response, sort));
    const logResponse = elasticResponse.getLogs(this.logMessageField, this.logLevelField);
    const dataFrame = _first(logResponse.data);

    if (!dataFrame) {
      return { data: [] };
    }

    /**
     * The LogRowContextProvider requires there to be a field in dataFrame.fields
     * named `ts` for the timestamp and `line` for the actual log line to display.
     * Unfortunately these fields are hardcoded and are required for the lines to
     * be properly displayed. This code just copies the fields based on this.timeField
     * and this.logMessageField and recreates the dataFrame so it works.
     */
    const timestampField = dataFrame.fields.find((f: Field) => f.name === this.timeField);
    const lineField = dataFrame.fields.find((f: Field) => f.name === this.logMessageField);
    if (timestampField && lineField) {
      return {
        data: [
          {
            ...dataFrame,
            fields: [...dataFrame.fields, { ...timestampField, name: 'ts' }, { ...lineField, name: 'line' }],
          },
        ],
      };
    }
    return logResponse;
  };

  getDataProvider(
    type: SupplementaryQueryType,
    request: DataQueryRequest<ElasticsearchQuery>
  ): Observable<DataQueryResponse> | undefined {
    if (!this.getSupportedSupplementaryQueryTypes().includes(type)) {
      return undefined;
    }
    switch (type) {
      case SupplementaryQueryType.LogsVolume:
        return this.getLogsVolumeDataProvider(request);
      default:
        return undefined;
    }
  }

  getSupportedSupplementaryQueryTypes(): SupplementaryQueryType[] {
    return [SupplementaryQueryType.LogsVolume];
  }

  getSupplementaryQuery(type: SupplementaryQueryType, query: ElasticsearchQuery): ElasticsearchQuery | undefined {
    if (!this.getSupportedSupplementaryQueryTypes().includes(type)) {
      return undefined;
    }

    let isQuerySuitable = false;

    switch (type) {
      case SupplementaryQueryType.LogsVolume:
        // it has to be a logs-producing range-query
        isQuerySuitable = !!(query.metrics?.length === 1 && query.metrics[0].type === 'logs');
        if (!isQuerySuitable) {
          return undefined;
        }

        const bucketAggs: BucketAggregation[] = [];
        const timeField = this.timeField ?? '@timestamp';

        if (this.logLevelField) {
          bucketAggs.push({
            id: '2',
            type: 'terms',
            settings: {
              min_doc_count: '0',
              size: '0',
              order: 'desc',
              orderBy: '_count',
              missing: LogLevel.unknown,
            },
            field: this.logLevelField,
          });
        }

        bucketAggs.push({
          id: '3',
          type: 'date_histogram',
          settings: {
            interval: 'auto',
            min_doc_count: '0',
            trimEdges: '0',
          },
          field: timeField,
        });

        return {
          refId: `${REF_ID_STARTER_LOG_VOLUME}${query.refId}`,
          query: query.query,
          metrics: [{ type: 'count', id: '1' }],
          timeField,
          bucketAggs,
        };

      default:
        return undefined;
    }
  }
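
  // Illustrative sketch (comment only, assumed values): for a logs query such as
  //   { refId: 'A', query: 'level:error', metrics: [{ type: 'logs', id: '1' }] }
  // the LogsVolume supplementary query produced above is roughly
  //   { refId: 'log-volume-A', query: 'level:error', metrics: [{ type: 'count', id: '1' }],
  //     timeField: '@timestamp', bucketAggs: [/* optional terms agg on the log level field */, /* date_histogram */] }
  // where '@timestamp' stands in for this datasource's configured time field.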

  getLogsVolumeDataProvider(request: DataQueryRequest<ElasticsearchQuery>): Observable<DataQueryResponse> | undefined {
    const logsVolumeRequest = cloneDeep(request);
    const targets = logsVolumeRequest.targets
      .map((target) => this.getSupplementaryQuery(SupplementaryQueryType.LogsVolume, target))
      .filter((query): query is ElasticsearchQuery => !!query);

    if (!targets.length) {
      return undefined;
    }

    return queryLogsVolume(
      this,
      { ...logsVolumeRequest, targets },
      {
        range: request.range,
        targets: request.targets,
        extractLevel: (dataFrame) => getLogLevelFromKey(dataFrame.name || ''),
      }
    );
  }

  query(request: DataQueryRequest<ElasticsearchQuery>): Observable<DataQueryResponse> {
    // Run the request through the backend if it comes from Explore and disableElasticsearchBackendExploreQuery is not set,
    // or if the elasticsearchBackendMigration feature toggle is enabled.
    const { elasticsearchBackendMigration, disableElasticsearchBackendExploreQuery } = config.featureToggles;
    const shouldRunThroughBackend =
      (request.app === CoreApp.Explore && !disableElasticsearchBackendExploreQuery) || elasticsearchBackendMigration;

    if (shouldRunThroughBackend) {
      const start = new Date();
      return super.query(request).pipe(tap((response) => trackQuery(response, request, start)));
    }

    let payload = '';
    const targets = this.interpolateVariablesInQueries(cloneDeep(request.targets), request.scopedVars);
    const sentTargets: ElasticsearchQuery[] = [];
    let targetsContainsLogsQuery = targets.some((target) => hasMetricOfType(target, 'logs'));

    const logLimits: Array<number | undefined> = [];

    for (const target of targets) {
      if (target.hide) {
        continue;
      }

      let queryObj;
      if (hasMetricOfType(target, 'logs')) {
        // FIXME: All this logic here should be in the query builder.
        // When moving to the BE-only implementation we should remove this and let the BE
        // handle this.
        // TODO: defaultBucketAgg creates a date_histogram aggregation without a field, so it falls back to
        // the configured timeField. We should allow people to use a different time field here.
        target.bucketAggs = [defaultBucketAgg()];

        const log = target.metrics?.find((m) => m.type === 'logs') as Logs;
        const limit = log.settings?.limit ? parseInt(log.settings?.limit, 10) : 500;
        logLimits.push(limit);

        target.metrics = [];
        // Setting this for metrics queries that are typed as logs
        queryObj = this.queryBuilder.getLogsQuery(target, limit);
      } else {
        logLimits.push();

        if (target.alias) {
          target.alias = this.interpolateLuceneQuery(target.alias, request.scopedVars);
        }

        queryObj = this.queryBuilder.build(target);
      }

      const esQuery = JSON.stringify(queryObj);

      const searchType = 'query_then_fetch';
      const header = this.getQueryHeader(searchType, request.range.from, request.range.to);
      payload += header + '\n';
      payload += esQuery + '\n';

      sentTargets.push(target);
    }

    if (sentTargets.length === 0) {
      return of({ data: [] });
    }

    // We replace the range here with actual values. We need to replace it together with the enclosing "" so that we
    // replace it as an integer, not as a string with digits. This is because Elastic will convert the string only if
    // the time field is specified as type date (which it probably should be), but it can also be specified as an
    // integer (millisecond epoch), and then sending a string would error out.
    payload = payload.replace(/"\$timeFrom"/g, request.range.from.valueOf().toString());
    payload = payload.replace(/"\$timeTo"/g, request.range.to.valueOf().toString());
    payload = this.templateSrv.replace(payload, request.scopedVars);
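
    // Illustrative sketch (comment only, assumed fragment and epoch values): a payload fragment like
    //   {"range":{"@timestamp":{"gte":"$timeFrom","lte":"$timeTo","format":"epoch_millis"}}}
    // becomes, after the replacements above,
    //   {"range":{"@timestamp":{"gte":1672531200000,"lte":1672534800000,"format":"epoch_millis"}}}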

    const url = this.getMultiSearchUrl();

    const start = new Date();
    return this.post(url, payload).pipe(
      map((res) => {
        const er = new ElasticResponse(sentTargets, res);

        // TODO: This needs to be revisited, it seems wrong to process ALL the sent queries as logs if only one of them was a log query
        if (targetsContainsLogsQuery) {
          const response = er.getLogs(this.logMessageField, this.logLevelField);
          response.data.forEach((dataFrame, index) => {
            enhanceDataFrame(dataFrame, this.dataLinks, logLimits[index]);
          });
          return response;
        }

        return er.getTimeSeries();
      }),
      tap((response) => trackQuery(response, request, start))
    );
  }

  isMetadataField(fieldName: string) {
    return ELASTIC_META_FIELDS.includes(fieldName);
  }

  // TODO: instead of being a string, this could be a custom type representing all the elastic types
  // FIXME: This doesn't seem to return actual MetricFindValues, we should either change the return type
  // or fix the implementation.
  getFields(type?: string[], range?: TimeRange): Observable<MetricFindValue[]> {
    const typeMap: Record<string, string> = {
      float: 'number',
      double: 'number',
      integer: 'number',
      long: 'number',
      date: 'date',
      date_nanos: 'date',
      string: 'string',
      text: 'string',
      scaled_float: 'number',
      nested: 'nested',
      histogram: 'number',
    };

    return this.get('/_mapping', range).pipe(
      map((result) => {
        const shouldAddField = (obj: any, key: string) => {
          if (this.isMetadataField(key)) {
            return false;
          }

          if (!type || type.length === 0) {
            return true;
          }

          // equal query type filter, or via typemap translation
          return type.includes(obj.type) || type.includes(typeMap[obj.type]);
        };

        // Store subfield names: [system, process, cpu, total] -> system.process.cpu.total
        const fieldNameParts: any = [];
        const fields: any = {};

        function getFieldsRecursively(obj: any) {
          for (const key in obj) {
            const subObj = obj[key];

            // Check mapping field for nested fields
            if (isObject(subObj.properties)) {
              fieldNameParts.push(key);
              getFieldsRecursively(subObj.properties);
            }

            if (isObject(subObj.fields)) {
              fieldNameParts.push(key);
              getFieldsRecursively(subObj.fields);
            }

            if (isString(subObj.type)) {
              const fieldName = fieldNameParts.concat(key).join('.');

              // Hide meta-fields and check field type
              if (shouldAddField(subObj, key)) {
                fields[fieldName] = {
                  text: fieldName,
                  type: subObj.type,
                };
              }
            }
          }
          fieldNameParts.pop();
        }

        for (const indexName in result) {
          const index = result[indexName];
          if (index && index.mappings) {
            const mappings = index.mappings;
            const properties = mappings.properties;
            getFieldsRecursively(properties);
          }
        }

        // transform to array
        return _map(fields, (value) => {
          return value;
        });
      })
    );
  }
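
  // Illustrative sketch (comment only, assumed mapping): given a _mapping response of
  //   { "my-index": { "mappings": { "properties": { "@timestamp": { "type": "date" }, "message": { "type": "text" } } } } }
  // getFields(['date']) would emit [{ text: '@timestamp', type: 'date' }], since 'text' maps to 'string' in typeMap
  // and is filtered out by the requested type.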

  getTerms(queryDef: TermsQuery, range = getDefaultTimeRange()): Observable<MetricFindValue[]> {
    const searchType = 'query_then_fetch';
    const header = this.getQueryHeader(searchType, range.from, range.to);
    let esQuery = JSON.stringify(this.queryBuilder.getTermsQuery(queryDef));

    esQuery = esQuery.replace(/\$timeFrom/g, range.from.valueOf().toString());
    esQuery = esQuery.replace(/\$timeTo/g, range.to.valueOf().toString());
    esQuery = header + '\n' + esQuery + '\n';

    const url = this.getMultiSearchUrl();

    return this.post(url, esQuery).pipe(
      map((res) => {
        if (!res.responses[0].aggregations) {
          return [];
        }

        const buckets = res.responses[0].aggregations['1'].buckets;
        return _map(buckets, (bucket) => {
          return {
            text: bucket.key_as_string || bucket.key,
            value: bucket.key,
          };
        });
      })
    );
  }

  getMultiSearchUrl() {
    const searchParams = new URLSearchParams();

    if (this.maxConcurrentShardRequests) {
      searchParams.append('max_concurrent_shard_requests', `${this.maxConcurrentShardRequests}`);
    }

    if (this.xpack && this.includeFrozen) {
      searchParams.append('ignore_throttled', 'false');
    }

    return ('_msearch?' + searchParams.toString()).replace(/\?$/, '');
  }
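
  // Illustrative sketch (comment only): with maxConcurrentShardRequests = 5 and frozen indices enabled this
  // returns '_msearch?max_concurrent_shard_requests=5&ignore_throttled=false'; with neither option set, the
  // trailing '?' is stripped and the plain '_msearch' path is returned.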

  metricFindQuery(query: string, options?: any): Promise<MetricFindValue[]> {
    const range = options?.range;
    const parsedQuery = JSON.parse(query);
    if (query) {
      if (parsedQuery.find === 'fields') {
        parsedQuery.type = this.interpolateLuceneQuery(parsedQuery.type);
        return lastValueFrom(this.getFields(parsedQuery.type, range));
      }

      if (parsedQuery.find === 'terms') {
        parsedQuery.field = this.interpolateLuceneQuery(parsedQuery.field);
        parsedQuery.query = this.interpolateLuceneQuery(parsedQuery.query);
        return lastValueFrom(this.getTerms(parsedQuery, range));
      }
    }

    return Promise.resolve([]);
  }
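
  // Illustrative sketch (comment only, assumed field names and values): the two query shapes handled above are
  //   {"find": "fields", "type": "keyword"}                           -> list field names of that type
  //   {"find": "terms", "field": "hostname", "query": "level:error"}  -> list term values for that field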

  getTagKeys() {
    return lastValueFrom(this.getFields());
  }

  getTagValues(options: any) {
    const range = this.timeSrv.timeRange();
    return lastValueFrom(this.getTerms({ field: options.key }, range));
  }

  targetContainsTemplate(target: any) {
    if (this.templateSrv.containsTemplate(target.query) || this.templateSrv.containsTemplate(target.alias)) {
      return true;
    }

    for (const bucketAgg of target.bucketAggs) {
      if (this.templateSrv.containsTemplate(bucketAgg.field) || this.objectContainsTemplate(bucketAgg.settings)) {
        return true;
      }
    }

    for (const metric of target.metrics) {
      if (
        this.templateSrv.containsTemplate(metric.field) ||
        this.objectContainsTemplate(metric.settings) ||
        this.objectContainsTemplate(metric.meta)
      ) {
        return true;
      }
    }

    return false;
  }

  private isPrimitive(obj: any) {
    if (obj === null || obj === undefined) {
      return true;
    }
    if (['string', 'number', 'boolean'].some((type) => type === typeof obj)) {
      return true;
    }

    return false;
  }

  private objectContainsTemplate(obj: any) {
    if (!obj) {
      return false;
    }

    for (const key of Object.keys(obj)) {
      if (this.isPrimitive(obj[key])) {
        if (this.templateSrv.containsTemplate(obj[key])) {
          return true;
        }
      } else if (Array.isArray(obj[key])) {
        for (const item of obj[key]) {
          if (this.objectContainsTemplate(item)) {
            return true;
          }
        }
      } else {
        if (this.objectContainsTemplate(obj[key])) {
          return true;
        }
      }
    }

    return false;
  }

  modifyQuery(query: ElasticsearchQuery, action: QueryFixAction): ElasticsearchQuery {
    if (!action.options) {
      return query;
    }

    let expression = query.query ?? '';
    switch (action.type) {
      case 'ADD_FILTER': {
        if (expression.length > 0) {
          expression += ' AND ';
        }
        expression += `${action.options.key}:"${action.options.value}"`;
        break;
      }
      case 'ADD_FILTER_OUT': {
        if (expression.length > 0) {
          expression += ' AND ';
        }
        expression += `-${action.options.key}:"${action.options.value}"`;
        break;
      }
    }
    return { ...query, query: expression };
  }
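
  // Illustrative sketch (comment only, assumed key/value): an ADD_FILTER action with key 'level' and value 'error'
  // turns the query 'message:timeout' into 'message:timeout AND level:"error"', while ADD_FILTER_OUT produces
  // 'message:timeout AND -level:"error"'.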

  addAdHocFilters(query: string) {
    const adhocFilters = this.templateSrv.getAdhocFilters(this.name);
    if (adhocFilters.length === 0) {
      return query;
    }
    const esFilters = adhocFilters.map((filter) => {
      const { key, operator, value } = filter;
      if (!key || !value) {
        return;
      }
      switch (operator) {
        case '=':
          return `${key}:"${value}"`;
        case '!=':
          return `-${key}:"${value}"`;
        case '=~':
          return `${key}:/${value}/`;
        case '!~':
          return `-${key}:/${value}/`;
        case '>':
          return `${key}:>${value}`;
        case '<':
          return `${key}:<${value}`;
      }
      return;
    });

    const finalQuery = [query, ...esFilters].filter((f) => f).join(' AND ');
    return finalQuery;
  }
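
  // Illustrative sketch (comment only, assumed filters): ad hoc filters
  //   [{ key: 'host', operator: '=', value: 'web-1' }, { key: 'status', operator: '>', value: '499' }]
  // appended to the query 'level:error' yield 'level:error AND host:"web-1" AND status:>499'.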

  // Used when running queries through backend
  applyTemplateVariables(query: ElasticsearchQuery, scopedVars: ScopedVars): ElasticsearchQuery {
    // We need a separate interpolation format for lucene queries, therefore we first interpolate any
    // lucene query string and then everything else
    const interpolateBucketAgg = (bucketAgg: BucketAggregation): BucketAggregation => {
      if (bucketAgg.type === 'filters') {
        return {
          ...bucketAgg,
          settings: {
            ...bucketAgg.settings,
            filters: bucketAgg.settings?.filters?.map((filter) => ({
              ...filter,
              query: this.interpolateLuceneQuery(filter.query, scopedVars) || '*',
            })),
          },
        };
      }

      return bucketAgg;
    };

    const expandedQuery = {
      ...query,
      datasource: this.getRef(),
      query: this.addAdHocFilters(this.interpolateLuceneQuery(query.query || '', scopedVars)),
      bucketAggs: query.bucketAggs?.map(interpolateBucketAgg),
    };

    const finalQuery = JSON.parse(this.templateSrv.replace(JSON.stringify(expandedQuery), scopedVars));
    return finalQuery;
  }
}

/**
 * Modifies the dataframe and adds dataLinks from the config.
 * Exported for tests.
 */
export function enhanceDataFrame(dataFrame: DataFrame, dataLinks: DataLinkConfig[], limit?: number) {
  if (limit) {
    dataFrame.meta = {
      ...dataFrame.meta,
      limit,
    };
  }

  if (!dataLinks.length) {
    return;
  }

  for (const field of dataFrame.fields) {
    const linksToApply = dataLinks.filter((dataLink) => new RegExp(dataLink.field).test(field.name));

    if (linksToApply.length === 0) {
      continue;
    }

    field.config = field.config || {};
    field.config.links = [...(field.config.links || []), ...linksToApply.map(generateDataLink)];
  }
}
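
// Illustrative sketch (comment only, assumed config): a data link config such as
//   { field: 'traceID', url: '...', datasourceUid: 'tempo-uid', urlDisplayLabel: 'Trace' }
// matches every field whose name tests against the /traceID/ regex and attaches the link built by
// generateDataLink() below (an internal link in this case, because datasourceUid is set).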

function generateDataLink(linkConfig: DataLinkConfig): DataLink {
  const dataSourceSrv = getDataSourceSrv();

  if (linkConfig.datasourceUid) {
    const dsSettings = dataSourceSrv.getInstanceSettings(linkConfig.datasourceUid);

    return {
      title: linkConfig.urlDisplayLabel || '',
      url: '',
      internal: {
        query: { query: linkConfig.url },
        datasourceUid: linkConfig.datasourceUid,
        datasourceName: dsSettings?.name ?? 'Data source not found',
      },
    };
  } else {
    return {
      title: linkConfig.urlDisplayLabel || '',
      url: linkConfig.url,
    };
  }
}

function transformHitsBasedOnDirection(response: any, direction: 'asc' | 'desc') {
  if (direction === 'desc') {
    return response;
  }
  const actualResponse = response.responses[0];
  return {
    ...response,
    responses: [
      {
        ...actualResponse,
        hits: {
          ...actualResponse.hits,
          hits: actualResponse.hits.hits.reverse(),
        },
      },
    ],
  };
}