Elasticsearch: Add word highlighting to search results (#30293)

* Elasticsearch: Add word highlighting to logs

* Remove unused deps; refactor matching code to remove matchAll

* Adding searchWords test

* Adding two matches for highlighting

* Adding code comments

* formatting comments
pull/30653/head
Chris Cowan 4 years ago committed by GitHub
parent c8deaeacce
commit 65b1e536d7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 42
      public/app/plugins/datasource/elasticsearch/elastic_response.ts
  2. 10
      public/app/plugins/datasource/elasticsearch/query_builder.ts
  3. 5
      public/app/plugins/datasource/elasticsearch/query_def.ts
  4. 19
      public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts

@ -18,6 +18,8 @@ import {
import { describeMetric } from './utils';
import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';
// Regex source that captures the text wrapped by the configured highlight
// sentinel tags (e.g. "@HIGHLIGHT@word@/HIGHLIGHT@" captures "word").
// NOTE(review): the `[^@]+` group assumes highlighted phrases never contain
// an '@' character — confirm against real highlighted field values.
const HIGHLIGHT_TAGS_EXP = `${queryDef.highlightTags.pre}([^@]+)${queryDef.highlightTags.post}`;
export class ElasticResponse {
constructor(private targets: ElasticsearchQuery[], private response: any) {
this.targets = targets;
@ -373,6 +375,7 @@ export class ElasticResponse {
_type: hit._type,
_index: hit._index,
sort: hit.sort,
highlight: hit.highlight,
};
if (hit._source) {
@ -438,7 +441,6 @@ export class ElasticResponse {
logLevelField?: string
): DataQueryResponse {
const dataFrame: DataFrame[] = [];
for (let n = 0; n < this.response.responses.length; n++) {
const response = this.response.responses[n];
if (response.error) {
@ -459,11 +461,41 @@ export class ElasticResponse {
// Add a row for each document
for (const doc of docs) {
if (logLevelField) {
// Remap level field based on the datasource config. This field is then used in explore to figure out the
// log level. We may rewrite some actual data in the level field if they are different.
// Remap level field based on the datasource config. This field is
// then used in explore to figure out the log level. We may rewrite
// some actual data in the level field if they are different.
doc['level'] = doc[logLevelField];
}
// When highlighting exists, we need to collect all the highlighted
// phrases and add them to the DataFrame's meta.searchWords array.
if (doc.highlight) {
// There might be multiple words so we need two versions of the
// regular expression. One to match globally; when used with line.match,
// it returns an array of matches. The second one is used to capture the
// values between the tags.
const globalHighlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP, 'g');
const highlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP);
const newSearchWords = Object.keys(doc.highlight)
.flatMap((key) => {
return doc.highlight[key].flatMap((line: string) => {
const matchedPhrases = line.match(globalHighlightWordRegex);
if (!matchedPhrases) {
return [];
}
return matchedPhrases.map((part) => {
const matches = part.match(highlightWordRegex);
return (matches && matches[1]) || null;
});
});
})
.filter(_.identity);
// If meta and searchWords already exist, add the words and
// deduplicate; otherwise create a new set of search words.
const searchWords = series.meta?.searchWords
? _.uniq([...series.meta.searchWords, ...newSearchWords])
: [...newSearchWords];
series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords };
}
series.add(doc);
}
if (isLogsRequest) {
@ -554,6 +586,7 @@ type Doc = {
_index: string;
_source?: any;
sort?: Array<string | number>;
highlight?: Record<string, string[]>;
};
/**
@ -575,6 +608,7 @@ const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames
_type: hit._type,
_index: hit._index,
sort: hit.sort,
highlight: hit.highlight,
_source: { ...flattened },
...flattened,
};

@ -10,7 +10,7 @@ import {
isPipelineAggregation,
isPipelineAggregationWithMultipleBucketPaths,
} from './components/QueryEditor/MetricAggregationsEditor/aggregations';
import { defaultBucketAgg, defaultMetricAgg, findMetricById } from './query_def';
import { defaultBucketAgg, defaultMetricAgg, findMetricById, highlightTags } from './query_def';
import { ElasticsearchQuery } from './types';
import { convertOrderByToMetricId } from './utils';
@ -436,6 +436,14 @@ export class ElasticQueryBuilder {
return {
...query,
aggs: this.build(target, null, querystring).aggs,
highlight: {
fields: {
'*': {},
},
pre_tags: [highlightTags.pre],
post_tags: [highlightTags.post],
fragment_size: 2147483647,
},
};
}
}

@ -26,6 +26,11 @@ export const movingAvgModelOptions: MovingAverageModelOption[] = [
{ label: 'Holt Winters', value: 'holt_winters' },
];
// Sentinel markers sent to Elasticsearch as highlight pre_tags/post_tags.
// The response parser looks for text between these tags to collect the
// highlighted phrases (searchWords) shown in Explore.
export const highlightTags = {
pre: '@HIGHLIGHT@',
post: '@/HIGHLIGHT@',
};
/**
 * Builds the default metric aggregation used when a query is created:
 * a plain `count` aggregation with the given id.
 *
 * @param id - aggregation id to assign (defaults to '1')
 * @returns a fresh count MetricAggregation
 */
export function defaultMetricAgg(id = '1'): MetricAggregation {
  const countAggregation: MetricAggregation = { type: 'count', id };
  return countAggregation;
}

@ -2,6 +2,7 @@ import { DataFrameView, FieldCache, KeyValue, MutableDataFrame } from '@grafana/
import { ElasticResponse } from '../elastic_response';
import flatten from 'app/core/utils/flatten';
import { ElasticsearchQuery } from '../types';
import { highlightTags } from '../query_def';
describe('ElasticResponse', () => {
let targets: ElasticsearchQuery[];
@ -1218,7 +1219,7 @@ describe('ElasticResponse', () => {
key: 'Q-1561369883389-0.7611823271062786-0',
liveStreaming: false,
maxDataPoints: 1620,
query: '',
query: 'hello AND message',
timeField: '@timestamp',
},
];
@ -1254,6 +1255,11 @@ describe('ElasticResponse', () => {
lvl: 'debug',
},
},
highlight: {
message: [
`${highlightTags.pre}hello${highlightTags.post}, i am a ${highlightTags.pre}message${highlightTags.post}`,
],
},
},
{
_id: 'kdospaidopa',
@ -1268,6 +1274,11 @@ describe('ElasticResponse', () => {
lvl: 'info',
},
},
highlight: {
message: [
`${highlightTags.pre}hello${highlightTags.post}, i am a ${highlightTags.pre}message${highlightTags.post}`,
],
},
},
],
},
@ -1279,6 +1290,12 @@ describe('ElasticResponse', () => {
const result = new ElasticResponse(targets, response).getLogs();
expect(result.data.length).toBe(2);
const logResults = result.data[0] as MutableDataFrame;
expect(logResults).toHaveProperty('meta');
expect(logResults.meta).toEqual({
searchWords: ['hello', 'message'],
preferredVisualisationType: 'logs',
});
const fields = logResults.fields.map((f) => {
return {
name: f.name,

Loading…
Cancel
Save