|
|
|
@ -18,6 +18,8 @@ import { |
|
|
|
|
import { describeMetric } from './utils'; |
|
|
|
|
import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils'; |
|
|
|
|
|
|
|
|
|
const HIGHLIGHT_TAGS_EXP = `${queryDef.highlightTags.pre}([^@]+)${queryDef.highlightTags.post}`; |
|
|
|
|
|
|
|
|
|
export class ElasticResponse { |
|
|
|
|
constructor(private targets: ElasticsearchQuery[], private response: any) { |
|
|
|
|
this.targets = targets; |
|
|
|
@ -373,6 +375,7 @@ export class ElasticResponse { |
|
|
|
|
_type: hit._type, |
|
|
|
|
_index: hit._index, |
|
|
|
|
sort: hit.sort, |
|
|
|
|
highlight: hit.highlight, |
|
|
|
|
}; |
|
|
|
|
|
|
|
|
|
if (hit._source) { |
|
|
|
@ -438,7 +441,6 @@ export class ElasticResponse { |
|
|
|
|
logLevelField?: string |
|
|
|
|
): DataQueryResponse { |
|
|
|
|
const dataFrame: DataFrame[] = []; |
|
|
|
|
|
|
|
|
|
for (let n = 0; n < this.response.responses.length; n++) { |
|
|
|
|
const response = this.response.responses[n]; |
|
|
|
|
if (response.error) { |
|
|
|
@ -459,11 +461,41 @@ export class ElasticResponse { |
|
|
|
|
// Add a row for each document
|
|
|
|
|
for (const doc of docs) { |
|
|
|
|
if (logLevelField) { |
|
|
|
|
// Remap level field based on the datasource config. This field is then used in explore to figure out the
|
|
|
|
|
// log level. We may rewrite some actual data in the level field if they are different.
|
|
|
|
|
// Remap level field based on the datasource config. This field is
|
|
|
|
|
// then used in explore to figure out the log level. We may rewrite
|
|
|
|
|
// some actual data in the level field if they are different.
|
|
|
|
|
doc['level'] = doc[logLevelField]; |
|
|
|
|
} |
|
|
|
|
|
|
|
|
|
// When highlighting exists, we need to collect all the highlighted
|
|
|
|
|
// phrases and add them to the DataFrame's meta.searchWords array.
|
|
|
|
|
if (doc.highlight) { |
|
|
|
|
// There might be multiple words so we need two versions of the
|
|
|
|
|
// regular expression. One to match globally, when used with part.match,
|
|
|
|
|
// it returns an array of matches. The second one is used to capture the
|
|
|
|
|
// values between the tags.
|
|
|
|
|
const globalHighlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP, 'g'); |
|
|
|
|
const highlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP); |
|
|
|
|
const newSearchWords = Object.keys(doc.highlight) |
|
|
|
|
.flatMap((key) => { |
|
|
|
|
return doc.highlight[key].flatMap((line: string) => { |
|
|
|
|
const matchedPhrases = line.match(globalHighlightWordRegex); |
|
|
|
|
if (!matchedPhrases) { |
|
|
|
|
return []; |
|
|
|
|
} |
|
|
|
|
return matchedPhrases.map((part) => { |
|
|
|
|
const matches = part.match(highlightWordRegex); |
|
|
|
|
return (matches && matches[1]) || null; |
|
|
|
|
}); |
|
|
|
|
}); |
|
|
|
|
}) |
|
|
|
|
.filter(_.identity); |
|
|
|
|
// If meta and searchWords already exist, add the words and
|
|
|
|
|
// deduplicate otherwise create a new set of search words.
|
|
|
|
|
const searchWords = series.meta?.searchWords |
|
|
|
|
? _.uniq([...series.meta.searchWords, ...newSearchWords]) |
|
|
|
|
: [...newSearchWords]; |
|
|
|
|
series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords }; |
|
|
|
|
} |
|
|
|
|
series.add(doc); |
|
|
|
|
} |
|
|
|
|
if (isLogsRequest) { |
|
|
|
@ -554,6 +586,7 @@ type Doc = { |
|
|
|
|
_index: string; |
|
|
|
|
_source?: any; |
|
|
|
|
sort?: Array<string | number>; |
|
|
|
|
highlight?: Record<string, string[]>; |
|
|
|
|
}; |
|
|
|
|
|
|
|
|
|
/** |
|
|
|
@ -575,6 +608,7 @@ const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames |
|
|
|
|
_type: hit._type, |
|
|
|
|
_index: hit._index, |
|
|
|
|
sort: hit.sort, |
|
|
|
|
highlight: hit.highlight, |
|
|
|
|
_source: { ...flattened }, |
|
|
|
|
...flattened, |
|
|
|
|
}; |
|
|
|
|