Allow searching issues for strings containing colons
commit fe1ab40529
parent dc303fe2db
@@ -173,7 +173,7 @@
       tokens.forEach((token) => {
         const condition = gl.FilteredSearchTokenKeys
           .searchByConditionKeyValue(token.key, token.value.toLowerCase());
-        const { param } = gl.FilteredSearchTokenKeys.searchByKey(token.key);
+        const { param } = gl.FilteredSearchTokenKeys.searchByKey(token.key) || {};
         const keyParam = param ? `${token.key}_${param}` : token.key;
         let tokenPath = '';
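Context for the `|| {}` change above: when a key is not a registered token key, a lookup like searchByKey() comes back undefined, and destructuring `param` from undefined throws a TypeError. A minimal sketch of the failure mode; the token-key list and its `param` values below are stand-ins, not GitLab's actual data:

// Hypothetical stand-in for gl.FilteredSearchTokenKeys.searchByKey.
const tokenKeys = [{ key: 'label', param: 'name' }, { key: 'milestone', param: 'title' }];
const searchByKey = key => tokenKeys.find(tokenKey => tokenKey.key === key);

// Without the fallback, an unrecognised key would throw:
//   const { param } = searchByKey('fake'); // TypeError: cannot destructure undefined
const { param } = searchByKey('fake') || {}; // param is undefined, no throw
const keyParam = param ? `fake_${param}` : 'fake'; // falls back to the bare key
console.log(keyParam); // "fake"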
@@ -1,9 +1,12 @@
+require('./filtered_search_token_keys');
+
 (() => {
   class FilteredSearchTokenizer {
     static processTokens(input) {
+      const allowedKeys = gl.FilteredSearchTokenKeys.get().map(i => i.key);
       // Regex extracts `(token):(symbol)(value)`
       // Values that start with a double quote must end in a double quote (same for single)
-      const tokenRegex = /(\w+):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\S+))/g;
+      const tokenRegex = new RegExp(`(${allowedKeys.join('|')}):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`, 'g');
       const tokens = [];
       let lastToken = null;
       const searchToken = input.replace(tokenRegex, (match, key, symbol, v1, v2, v3) => {
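A rough illustration of the regex change above, using an assumed subset of allowed keys (the real list comes from gl.FilteredSearchTokenKeys.get()): the old pattern tokenized any `word:value` pair, so free-text search terms containing colons were swallowed as bogus tokens; the new pattern only tokenizes recognised keys.

const allowedKeys = ['author', 'assignee', 'milestone', 'label']; // assumed subset

const oldRegex = /(\w+):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\S+))/g;
const newRegex = new RegExp(`(${allowedKeys.join('|')}):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`, 'g');

const input = 'label:~Doing std::includes';

console.log(input.match(oldRegex)); // ['label:~Doing', 'std::includes'] -- colon search term eaten
console.log(input.match(newRegex)); // ['label:~Doing'] -- 'std::includes' stays as search text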
@@ -0,0 +1,4 @@
+---
+title: Allow searching issues for strings containing colons
+merge_request:
+author:
@@ -99,6 +99,29 @@ require('~/filtered_search/filtered_search_tokenizer');
         expect(results.tokens[2].value).toBe('Doing');
         expect(results.tokens[2].symbol).toBe('~');
       });
+
+      it('returns search value for invalid tokens', () => {
+        const results = gl.FilteredSearchTokenizer.processTokens('fake:token');
+        expect(results.lastToken).toBe('fake:token');
+        expect(results.searchToken).toBe('fake:token');
+        expect(results.tokens.length).toEqual(0);
+      });
+
+      it('returns search value and token for mix of valid and invalid tokens', () => {
+        const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token');
+        expect(results.tokens.length).toEqual(1);
+        expect(results.tokens[0].key).toBe('label');
+        expect(results.tokens[0].value).toBe('real');
+        expect(results.tokens[0].symbol).toBe('');
+        expect(results.lastToken).toBe('fake:token');
+        expect(results.searchToken).toBe('fake:token');
+      });
+
+      it('returns search value for invalid symbols', () => {
+        const results = gl.FilteredSearchTokenizer.processTokens('std::includes');
+        expect(results.lastToken).toBe('std::includes');
+        expect(results.searchToken).toBe('std::includes');
+      });
     });
   });
 })();
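To summarise the behaviour the new specs assert (a sketch, assuming only `label` is a registered key): unrecognised `key:value` pairs and colon-containing terms are no longer tokenized and remain available as the plain search string.

const results = gl.FilteredSearchTokenizer.processTokens('label:real std::includes');
// results.tokens      => [{ key: 'label', value: 'real', symbol: '' }]
// results.searchToken => 'std::includes'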