From fe1ab40529bd91eea6e2f9e086bbf9b91ae37891 Mon Sep 17 00:00:00 2001
From: Simon Knox <psimyn@gmail.com>
Date: Tue, 21 Feb 2017 11:33:42 +1100
Subject: [PATCH] Allow searching issues for strings containing colons

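The tokenizer regex previously treated any `word:` pair as a filter
token, so a search term that merely contains a colon (for example
`std::includes`) was swallowed as a bogus token and
`searchByKey(token.key)` could return undefined. Build the token regex
from the allowed token keys instead, guard the `searchByKey` lookup
with a default object, and add tokenizer specs covering invalid keys
and bare colons.
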
---
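Notes (dropped by `git am`): a minimal standalone sketch of how the
key-restricted regex behaves, assuming the default token keys are
author, assignee, milestone and label; this is an illustration only,
not the actual module.

    const allowedKeys = ['author', 'assignee', 'milestone', 'label']; // assumed defaults
    const tokenRegex = new RegExp(
      `(${allowedKeys.join('|')}):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`, 'g');

    // Recognised keys still tokenize as before.
    console.log('label:~bug'.match(tokenRegex));    // [ 'label:~bug' ]

    // Plain search terms containing colons no longer match, so they
    // fall through to the search token instead.
    console.log('std::includes'.match(tokenRegex)); // null
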
 .../filtered_search_manager.js.es6            |  2 +-
 .../filtered_search_tokenizer.js.es6          |  5 +++-
 changelogs/unreleased/28357-colon-search.yml  |  4 ++++
 .../filtered_search_tokenizer_spec.js.es6     | 23 +++++++++++++++++++
 4 files changed, 32 insertions(+), 2 deletions(-)
 create mode 100644 changelogs/unreleased/28357-colon-search.yml

diff --git a/app/assets/javascripts/filtered_search/filtered_search_manager.js.es6 b/app/assets/javascripts/filtered_search/filtered_search_manager.js.es6
index ffc7d29e4c5..13a9bf59246 100644
--- a/app/assets/javascripts/filtered_search/filtered_search_manager.js.es6
+++ b/app/assets/javascripts/filtered_search/filtered_search_manager.js.es6
@@ -173,7 +173,7 @@
       tokens.forEach((token) => {
         const condition = gl.FilteredSearchTokenKeys
           .searchByConditionKeyValue(token.key, token.value.toLowerCase());
-        const { param } = gl.FilteredSearchTokenKeys.searchByKey(token.key);
+        const { param } = gl.FilteredSearchTokenKeys.searchByKey(token.key) || {};
         const keyParam = param ? `${token.key}_${param}` : token.key;
         let tokenPath = '';
 
diff --git a/app/assets/javascripts/filtered_search/filtered_search_tokenizer.js.es6 b/app/assets/javascripts/filtered_search/filtered_search_tokenizer.js.es6
index cf53845a48b..9bf1b1ced88 100644
--- a/app/assets/javascripts/filtered_search/filtered_search_tokenizer.js.es6
+++ b/app/assets/javascripts/filtered_search/filtered_search_tokenizer.js.es6
@@ -1,9 +1,12 @@
+require('./filtered_search_token_keys');
+
 (() => {
   class FilteredSearchTokenizer {
     static processTokens(input) {
+      const allowedKeys = gl.FilteredSearchTokenKeys.get().map(i => i.key);
       // Regex extracts `(token):(symbol)(value)`
       // Values that start with a double quote must end in a double quote (same for single)
-      const tokenRegex = /(\w+):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\S+))/g;
+      const tokenRegex = new RegExp(`(${allowedKeys.join('|')}):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`, 'g');
       const tokens = [];
       let lastToken = null;
       const searchToken = input.replace(tokenRegex, (match, key, symbol, v1, v2, v3) => {
diff --git a/changelogs/unreleased/28357-colon-search.yml b/changelogs/unreleased/28357-colon-search.yml
new file mode 100644
index 00000000000..4bbb0dc12b2
--- /dev/null
+++ b/changelogs/unreleased/28357-colon-search.yml
@@ -0,0 +1,4 @@
+---
+title: Allow searching issues for strings containing colons
+merge_request:
+author:
diff --git a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js.es6 b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js.es6
index 84c0e9cbfe2..a91801cfc89 100644
--- a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js.es6
+++ b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js.es6
@@ -99,6 +99,29 @@ require('~/filtered_search/filtered_search_tokenizer');
         expect(results.tokens[2].value).toBe('Doing');
         expect(results.tokens[2].symbol).toBe('~');
       });
+
+      it('returns search value for invalid tokens', () => {
+        const results = gl.FilteredSearchTokenizer.processTokens('fake:token');
+        expect(results.lastToken).toBe('fake:token');
+        expect(results.searchToken).toBe('fake:token');
+        expect(results.tokens.length).toEqual(0);
+      });
+
+      it('returns search value and token for mix of valid and invalid tokens', () => {
+        const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token');
+        expect(results.tokens.length).toEqual(1);
+        expect(results.tokens[0].key).toBe('label');
+        expect(results.tokens[0].value).toBe('real');
+        expect(results.tokens[0].symbol).toBe('');
+        expect(results.lastToken).toBe('fake:token');
+        expect(results.searchToken).toBe('fake:token');
+      });
+
+      it('returns search value for invalid symbols', () => {
+        const results = gl.FilteredSearchTokenizer.processTokens('std::includes');
+        expect(results.lastToken).toBe('std::includes');
+        expect(results.searchToken).toBe('std::includes');
+      });
     });
   });
 })();
-- 
GitLab