+
+ {/*{isAutocompleteActive && (*/}
+ {/* */}
+ {/*)}*/}
);
};
diff --git a/client/src/components/Chat/index.tsx b/client/src/components/Chat/index.tsx
index 3e1def6681..814117d0de 100644
--- a/client/src/components/Chat/index.tsx
+++ b/client/src/components/Chat/index.tsx
@@ -6,6 +6,7 @@ import {
ChatMessage,
ChatMessageAuthor,
ChatMessageServer,
+ ChatMessageUser,
OpenChatHistoryItem,
} from '../../types/general';
import { AppNavigationContext } from '../../context/appNavigationContext';
@@ -15,6 +16,10 @@ import { mapLoadingSteps } from '../../mappers/conversation';
import { findElementInCurrentTab } from '../../utils/domUtils';
import { conversationsCache } from '../../services/cache';
import useResizeableWidth from '../../hooks/useResizeableWidth';
+import {
+ concatenateParsedQuery,
+ splitUserInputAfterAutocomplete,
+} from '../../utils';
import DeprecatedClientModal from './ChatFooter/DeprecatedClientModal';
import ChatHeader from './ChatHeader';
import ChatBody from './ChatBody';
@@ -66,7 +71,10 @@ const Chat = () => {
if (!query) {
return;
}
- console.log('query', query);
+ const cleanQuery = query
+ .replace(/\|(path:.*?)\|/, '$1')
+ .replace(/\|(lang:.*?)\|/, '$1'); // clean up after autocomplete
+ console.log('query', query, 'cleanQuery', cleanQuery);
prevEventSource?.close();
setInputValue('');
setLoading(true);
@@ -77,7 +85,7 @@ const Chat = () => {
? `/explain?relative_path=${encodeURIComponent(
options.filePath,
)}&line_start=${options.lineStart}&line_end=${options.lineEnd}`
- : `?q=${encodeURIComponent(query)}${
+ : `?q=${encodeURIComponent(cleanQuery)}${
selectedBranch ? ` branch:${selectedBranch}` : ''
}`
}&repo_ref=${tab.repoRef}${
@@ -300,6 +308,7 @@ const Chat = () => {
{
author: ChatMessageAuthor.User,
text: userQuery,
+ parsedQuery: splitUserInputAfterAutocomplete(userQuery),
isLoading: false,
},
]);
@@ -350,7 +359,10 @@ const Chat = () => {
stopGenerating();
}
setHideMessagesFrom(i);
- setInputValue(conversation[i].text!);
+ const mes = conversation[i] as ChatMessageUser;
+ setInputValue(
+ mes.parsedQuery ? concatenateParsedQuery(mes.parsedQuery) : mes.text!,
+ );
},
[isLoading, conversation],
);
diff --git a/client/src/locales/en.json b/client/src/locales/en.json
index 6c4cab6133..ca52c45d9f 100644
--- a/client/src/locales/en.json
+++ b/client/src/locales/en.json
@@ -427,5 +427,8 @@
"Diff generation failed": "Diff generation failed",
"Failed to apply the diff": "Failed to apply the diff",
"Cancel diff generation": "Cancel diff generation",
- "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.": "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally."
-}
\ No newline at end of file
+ "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.": "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.",
+ "Cloning": "Cloning",
+ "Directories": "Directories",
+ "Languages": "Languages"
+}
diff --git a/client/src/locales/es.json b/client/src/locales/es.json
index 6dea92a15e..0a69421d34 100644
--- a/client/src/locales/es.json
+++ b/client/src/locales/es.json
@@ -428,5 +428,8 @@
"Diff generation failed": "Falló la generación de la diferencia",
"Failed to apply the diff": "No se pudo aplicar la diferencia",
"Cancel diff generation": "Cancelar la generación de la diferencia",
- "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.": "Los siguientes cambios representan la diferencia de git para el repositorio remoto. Ten en cuenta que estos cambios no se pueden aplicar directamente a un repositorio remoto. Usa el botón \"Copiar\" para copiar los cambios y aplicarlos localmente."
-}
\ No newline at end of file
+ "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.": "Los siguientes cambios representan la diferencia de git para el repositorio remoto. Ten en cuenta que estos cambios no se pueden aplicar directamente a un repositorio remoto. Usa el botón \"Copiar\" para copiar los cambios y aplicarlos localmente.",
+ "Cloning": "Clonación",
+ "Directories": "Directorios",
+ "Languages": "Lenguas"
+}
diff --git a/client/src/locales/it.json b/client/src/locales/it.json
index 4a4468a2f9..2f9dd9895b 100644
--- a/client/src/locales/it.json
+++ b/client/src/locales/it.json
@@ -411,5 +411,8 @@
"Select library": "Seleziona libreria",
"Resync": "Risincronizzazione",
"Search docs or paste a URL to index": "Cerca documenti o incolla un URL per indicizzare",
- "Add docs": "Aggiungi documenti"
-}
\ No newline at end of file
+ "Add docs": "Aggiungi documenti",
+ "Cloning": "Clonazione",
+ "Directories": "Directory",
+ "Languages": "Le lingue"
+}
diff --git a/client/src/locales/ja.json b/client/src/locales/ja.json
index 4083ee56d1..7a57313dca 100644
--- a/client/src/locales/ja.json
+++ b/client/src/locales/ja.json
@@ -425,5 +425,8 @@
"Diff generation failed": "差分の生成に失敗しました",
"Failed to apply the diff": "差分の適用に失敗しました",
"Cancel diff generation": "差分生成をキャンセル",
- "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.": "以下の変更は、リモートリポジトリのGit差分を表しています。これらの変更はリモートリポジトリに直接適用できないことに注意してください。「コピー」ボタンを使用して変更をコピーし、ローカルに適用してください。"
-}
\ No newline at end of file
+ "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.": "以下の変更は、リモートリポジトリのGit差分を表しています。これらの変更はリモートリポジトリに直接適用できないことに注意してください。「コピー」ボタンを使用して変更をコピーし、ローカルに適用してください。",
+ "Cloning": "クローニング",
+ "Directories": "ディレクトリ",
+ "Languages": "言語"
+}
diff --git a/client/src/locales/zh-CN.json b/client/src/locales/zh-CN.json
index 57d238fb2f..e3ee2287fa 100644
--- a/client/src/locales/zh-CN.json
+++ b/client/src/locales/zh-CN.json
@@ -434,5 +434,8 @@
"Diff generation failed": "生成差异失败",
"Failed to apply the diff": "应用差异失败",
"Cancel diff generation": "取消生成差异",
- "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.": "以下的更改代表远程仓库的git差异。请注意,这些更改不能直接应用到远程仓库。使用“复制”按钮复制更改并在本地应用。"
-}
\ No newline at end of file
+ "The following changes represent the git diff for the remote repository. Please note that these changes cannot be applied directly to a remote repository. Use the \"Copy\" button to copy the changes and apply them locally.": "以下的更改代表远程仓库的git差异。请注意,这些更改不能直接应用到远程仓库。使用“复制”按钮复制更改并在本地应用。",
+ "Cloning": "克隆",
+ "Directories": "目录",
+ "Languages": "语言"
+}
diff --git a/client/src/mappers/conversation.ts b/client/src/mappers/conversation.ts
index d79e568b63..55d92da1f9 100644
--- a/client/src/mappers/conversation.ts
+++ b/client/src/mappers/conversation.ts
@@ -1,6 +1,10 @@
import flatten from 'lodash.flatten';
-import { SearchStepType } from '../types/api';
-import { ChatLoadingStep } from '../types/general';
+import { ConversationType, SearchStepType } from '../types/api';
+import {
+ ChatLoadingStep,
+ ParsedQueryType,
+ ParsedQueryTypeEnum,
+} from '../types/general';
export const mapLoadingSteps = (
searchSteps: SearchStepType[],
@@ -25,3 +29,54 @@ export const mapLoadingSteps = (
});
return flatten(arr);
};
+
+const mapQueryParts = (query: ConversationType['query']) => {
+ const array: {
+ type: ParsedQueryTypeEnum;
+ start: number;
+ end: number;
+ text: string;
+ }[] = [];
+ (
+ ['paths', 'langs', 'repos', 'branch'] as (keyof ConversationType['query'])[]
+ ).forEach((key) => {
+ array.push(
+ // @ts-ignore
+ ...query[key].map((s) => ({
+ type: key === 'branch' ? key : key.slice(0, -1),
+ start: s.Plain.start,
+ end: s.Plain.end,
+ text: s.Plain.content,
+ })),
+ );
+ });
+ return array;
+};
+
+export const mapUserQuery = (m: ConversationType): ParsedQueryType[] => {
+ const parsedQuery = [];
+ const parts = mapQueryParts(m.query).sort((a, b) => a.start - b.start);
+ let currentIndex = 0;
+ const originalString = m.query.raw_query;
+
+ for (const item of parts) {
+ if (currentIndex < item.start) {
+ const textBefore = originalString.substring(
+ currentIndex,
+ item.start - item.type.length - 1,
+ );
+ parsedQuery.push({ type: ParsedQueryTypeEnum.TEXT, text: textBefore });
+ currentIndex = item.start - item.type.length - 1;
+ }
+
+ parsedQuery.push({ type: item.type, text: item.text });
+ currentIndex = item.end;
+ }
+
+ if (currentIndex < originalString.length) {
+ const textAfter = originalString.substring(currentIndex);
+ parsedQuery.push({ type: ParsedQueryTypeEnum.TEXT, text: textAfter });
+ }
+
+ return parsedQuery;
+};
diff --git a/client/src/types/api.ts b/client/src/types/api.ts
index 25d742acc6..36f7e22985 100644
--- a/client/src/types/api.ts
+++ b/client/src/types/api.ts
@@ -79,6 +79,7 @@ export interface SearchResponseFile {
repo_name: string;
repo_ref: string;
lang: string;
+ is_dir: boolean;
}
export interface FlagItem {
@@ -249,7 +250,34 @@ export type SearchStepType = ProcStep | CodeStep | PathStep;
export type ConversationType = {
id: string;
search_steps: SearchStepType[];
- query: { target: { Plain: string } };
+ query: {
+ raw_query: string;
+ repos: [];
+ paths: {
+ Plain: { start: number; end: number; content: string };
+ }[];
+ langs: {
+ Plain: {
+ start: number;
+ end: number;
+ content: string;
+ };
+ }[];
+ branch: {
+ Plain: {
+ start: number;
+ end: number;
+ content: string;
+ };
+ }[];
+ target: {
+ Plain: {
+ start: number;
+ end: number;
+ content: string;
+ };
+ };
+ };
conclusion: string;
answer: string;
paths: string[];
diff --git a/client/src/types/general.ts b/client/src/types/general.ts
index 0e9c9b1abf..35a94e40ce 100644
--- a/client/src/types/general.ts
+++ b/client/src/types/general.ts
@@ -169,9 +169,18 @@ export enum ChatMessageAuthor {
Server = 'server',
}
-type ChatMessageUser = {
+export enum ParsedQueryTypeEnum {
+ TEXT = 'text',
+ PATH = 'path',
+ LANG = 'lang',
+ BRANCH = 'branch',
+}
+export type ParsedQueryType = { type: ParsedQueryTypeEnum; text: string };
+
+export type ChatMessageUser = {
author: ChatMessageAuthor.User;
text: string;
+ parsedQuery?: ParsedQueryType[];
isFromHistory?: boolean;
};
diff --git a/client/src/utils/index.test.ts b/client/src/utils/index.test.ts
new file mode 100644
index 0000000000..6f2b16b314
--- /dev/null
+++ b/client/src/utils/index.test.ts
@@ -0,0 +1,102 @@
+import { ParsedQueryTypeEnum } from '../types/general';
+import { splitUserInputAfterAutocomplete } from './index';
+
+describe('Utils', () => {
+ describe('splitUserInputAfterAutocomplete', () => {
+ test('simple string', () => {
+ expect(
+ JSON.stringify(splitUserInputAfterAutocomplete('my simple string')),
+ ).toEqual(
+ JSON.stringify([
+ { type: ParsedQueryTypeEnum.TEXT, text: 'my simple string' },
+ ]),
+ );
+ });
+ test('filter at start', () => {
+ expect(
+ JSON.stringify(
+ splitUserInputAfterAutocomplete('|lang:TypeScript| my simple string'),
+ ),
+ ).toEqual(
+ JSON.stringify([
+ { type: ParsedQueryTypeEnum.LANG, text: 'TypeScript' },
+ { type: ParsedQueryTypeEnum.TEXT, text: ' my simple string' },
+ ]),
+ );
+ });
+ test('filter at the end', () => {
+ expect(
+ JSON.stringify(
+ splitUserInputAfterAutocomplete('my simple string |lang:TypeScript|'),
+ ),
+ ).toEqual(
+ JSON.stringify([
+ { type: ParsedQueryTypeEnum.TEXT, text: 'my simple string ' },
+ { type: ParsedQueryTypeEnum.LANG, text: 'TypeScript' },
+ ]),
+ );
+ });
+ test('lang filter in the middle', () => {
+ expect(
+ JSON.stringify(
+ splitUserInputAfterAutocomplete('my simple |lang:TypeScript| string'),
+ ),
+ ).toEqual(
+ JSON.stringify([
+ { type: ParsedQueryTypeEnum.TEXT, text: 'my simple ' },
+ { type: ParsedQueryTypeEnum.LANG, text: 'TypeScript' },
+ { type: ParsedQueryTypeEnum.TEXT, text: ' string' },
+ ]),
+ );
+ });
+ test('path filter in the middle', () => {
+ expect(
+ JSON.stringify(
+ splitUserInputAfterAutocomplete(
+ 'my |path:src/index.js| simple string',
+ ),
+ ),
+ ).toEqual(
+ JSON.stringify([
+ { type: ParsedQueryTypeEnum.TEXT, text: 'my ' },
+ { type: ParsedQueryTypeEnum.PATH, text: 'src/index.js' },
+ { type: ParsedQueryTypeEnum.TEXT, text: ' simple string' },
+ ]),
+ );
+ });
+ test('lang filter after path filter in the middle', () => {
+ expect(
+ JSON.stringify(
+ splitUserInputAfterAutocomplete(
+ 'my |path:src/index.js| simple |lang:TypeScript| string',
+ ),
+ ),
+ ).toEqual(
+ JSON.stringify([
+ { type: ParsedQueryTypeEnum.TEXT, text: 'my ' },
+ { type: ParsedQueryTypeEnum.PATH, text: 'src/index.js' },
+ { type: ParsedQueryTypeEnum.TEXT, text: ' simple ' },
+ { type: ParsedQueryTypeEnum.LANG, text: 'TypeScript' },
+ { type: ParsedQueryTypeEnum.TEXT, text: ' string' },
+ ]),
+ );
+ });
+ test('path filter after lang filter in the middle', () => {
+ expect(
+ JSON.stringify(
+ splitUserInputAfterAutocomplete(
+ 'my |lang:TypeScript| simple |path:src/index.js| string',
+ ),
+ ),
+ ).toEqual(
+ JSON.stringify([
+ { type: ParsedQueryTypeEnum.TEXT, text: 'my ' },
+ { type: ParsedQueryTypeEnum.LANG, text: 'TypeScript' },
+ { type: ParsedQueryTypeEnum.TEXT, text: ' simple ' },
+ { type: ParsedQueryTypeEnum.PATH, text: 'src/index.js' },
+ { type: ParsedQueryTypeEnum.TEXT, text: ' string' },
+ ]),
+ );
+ });
+ });
+});
diff --git a/client/src/utils/index.ts b/client/src/utils/index.ts
index dcc3b3cb22..8c80fffaff 100644
--- a/client/src/utils/index.ts
+++ b/client/src/utils/index.ts
@@ -1,7 +1,13 @@
import { MouseEvent } from 'react';
import { v4 as uuidv4 } from 'uuid';
-import { ja, zhCN, es } from 'date-fns/locale';
-import { LocaleType, RepoType, RepoUi } from '../types/general';
+import { es, ja, zhCN } from 'date-fns/locale';
+import {
+ LocaleType,
+ ParsedQueryType,
+ ParsedQueryTypeEnum,
+ RepoType,
+ RepoUi,
+} from '../types/general';
import langs from './langs.json';
export const copyToClipboard = (value: string) => {
@@ -78,7 +84,7 @@ export const getFileExtensionForLang = (lang: string, lowercased?: boolean) => {
ext = langs[key]?.[0];
}
}
- return 'index' + ext;
+ return 'index' + (ext || `.${lang}`);
};
export const getPrettyLangName = (lang: string) => {
@@ -388,3 +394,49 @@ export function mergeRanges(ranges: [number, number][]): [number, number][] {
return mergedRanges;
}
+
+export function splitUserInputAfterAutocomplete(
+ input: string,
+): ParsedQueryType[] {
+ const pathRegex = /\|path:(.*?)\|/g;
+ const langRegex = /\|lang:(.*?)\|/g;
+ const combinedRegex = /\|(path|lang):(.*?)\|/g;
+ const result: ParsedQueryType[] = [];
+
+ let lastIndex = 0;
+
+ const addTextContent = (text: string) => {
+ if (text.length > 0) {
+ result.push({ type: ParsedQueryTypeEnum.TEXT, text });
+ }
+ };
+
+ input.replace(combinedRegex, (_, type, text, index) => {
+ addTextContent(input.substring(lastIndex, index));
+ result.push({
+ type:
+ type === 'lang' ? ParsedQueryTypeEnum.LANG : ParsedQueryTypeEnum.PATH,
+ text,
+ });
+ lastIndex = index + text.length + type.length + 3; // 3 accounts for the two "|" delimiters and the ":" in "|type:text|"
+ return '';
+ });
+
+ addTextContent(input.substring(lastIndex));
+
+ return result;
+}
+
+export function concatenateParsedQuery(query: ParsedQueryType[]) {
+ let result = '';
+ query.forEach((q) => {
+ if (q.type === ParsedQueryTypeEnum.TEXT) {
+ result += q.text;
+ } else if (q.type === ParsedQueryTypeEnum.PATH) {
+ result += `|path:${q.text}|`;
+ } else if (q.type === ParsedQueryTypeEnum.LANG) {
+ result += `|lang:${q.text}|`;
+ }
+ });
+ return result;
+}
diff --git a/package-lock.json b/package-lock.json
index f324e6d6de..d9aaa53e62 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -31,6 +31,7 @@
"react-dom": "^18.2.0",
"react-i18next": "^13.0.2",
"react-markdown": "^8.0.7",
+ "react-mentions": "^4.4.10",
"react-router-dom": "^6.14.2",
"react-virtualized-auto-sizer": "^1.0.20",
"react-window": "^1.8.9",
@@ -58,6 +59,7 @@
"@types/prismjs": "^1.26.0",
"@types/react": "^18.0.17",
"@types/react-dom": "^18.0.6",
+ "@types/react-mentions": "^4.1.12",
"@types/react-virtualized-auto-sizer": "^1.0.1",
"@types/react-window": "^1.8.5",
"@types/remarkable": "^2.0.3",
@@ -4110,6 +4112,15 @@
"@types/react": "*"
}
},
+ "node_modules/@types/react-mentions": {
+ "version": "4.1.12",
+ "resolved": "https://registry.npmjs.org/@types/react-mentions/-/react-mentions-4.1.12.tgz",
+ "integrity": "sha512-r1jQ/juhUFmMoZPbWRqxUHDWuqVntuz+CaKc52i45ghvpP83KBtVr9bydQo+hH+t9IFb703VjQ0RSCF3mxoe/Q==",
+ "dev": true,
+ "dependencies": {
+ "@types/react": "*"
+ }
+ },
"node_modules/@types/react-virtualized-auto-sizer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@types/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.1.tgz",
@@ -8457,6 +8468,14 @@
"node": ">= 0.4"
}
},
+ "node_modules/invariant": {
+ "version": "2.2.4",
+ "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz",
+ "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==",
+ "dependencies": {
+ "loose-envify": "^1.0.0"
+ }
+ },
"node_modules/is-arguments": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz",
@@ -13164,6 +13183,29 @@
"resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz",
"integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w=="
},
+ "node_modules/react-mentions": {
+ "version": "4.4.10",
+ "resolved": "https://registry.npmjs.org/react-mentions/-/react-mentions-4.4.10.tgz",
+ "integrity": "sha512-JHiQlgF1oSZR7VYPjq32wy97z1w1oE4x10EuhKjPr4WUKhVzG1uFQhQjKqjQkbVqJrmahf+ldgBTv36NrkpKpA==",
+ "dependencies": {
+ "@babel/runtime": "7.4.5",
+ "invariant": "^2.2.4",
+ "prop-types": "^15.5.8",
+ "substyle": "^9.1.0"
+ },
+ "peerDependencies": {
+ "react": ">=16.8.3",
+ "react-dom": ">=16.8.3"
+ }
+ },
+ "node_modules/react-mentions/node_modules/@babel/runtime": {
+ "version": "7.4.5",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.4.5.tgz",
+ "integrity": "sha512-TuI4qpWZP6lGOGIuGWtp9sPluqYICmbk8T/1vpSysqJxRPkudh/ofFWyqdcMsDf2s7KvDL4/YHgKyvcS3g9CJQ==",
+ "dependencies": {
+ "regenerator-runtime": "^0.13.2"
+ }
+ },
"node_modules/react-router": {
"version": "6.14.2",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-6.14.2.tgz",
@@ -14118,6 +14160,18 @@
"inline-style-parser": "0.1.1"
}
},
+ "node_modules/substyle": {
+ "version": "9.4.1",
+ "resolved": "https://registry.npmjs.org/substyle/-/substyle-9.4.1.tgz",
+ "integrity": "sha512-VOngeq/W1/UkxiGzeqVvDbGDPM8XgUyJVWjrqeh+GgKqspEPiLYndK+XRcsKUHM5Muz/++1ctJ1QCF/OqRiKWA==",
+ "dependencies": {
+ "@babel/runtime": "^7.3.4",
+ "invariant": "^2.2.4"
+ },
+ "peerDependencies": {
+ "react": ">=16.8.3"
+ }
+ },
"node_modules/sucrase": {
"version": "3.34.0",
"resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.34.0.tgz",
diff --git a/package.json b/package.json
index f0fd7b6bca..ce7761ae44 100644
--- a/package.json
+++ b/package.json
@@ -33,6 +33,7 @@
"@types/prismjs": "^1.26.0",
"@types/react": "^18.0.17",
"@types/react-dom": "^18.0.6",
+ "@types/react-mentions": "^4.1.12",
"@types/react-virtualized-auto-sizer": "^1.0.1",
"@types/react-window": "^1.8.5",
"@types/remarkable": "^2.0.3",
@@ -91,6 +92,7 @@
"react-dom": "^18.2.0",
"react-i18next": "^13.0.2",
"react-markdown": "^8.0.7",
+ "react-mentions": "^4.4.10",
"react-router-dom": "^6.14.2",
"react-virtualized-auto-sizer": "^1.0.20",
"react-window": "^1.8.9",
@@ -105,4 +107,4 @@
"node": "16.17.0",
"npm": "8.19.0"
}
-}
\ No newline at end of file
+}
diff --git a/server/bleep/src/agent.rs b/server/bleep/src/agent.rs
index 5d470fe9e4..995e982956 100644
--- a/server/bleep/src/agent.rs
+++ b/server/bleep/src/agent.rs
@@ -1,4 +1,4 @@
-use std::{sync::Arc, time::Duration};
+use std::{ops::Deref, sync::Arc, time::Duration};
use anyhow::{anyhow, Context, Result};
use futures::{Future, TryStreamExt};
@@ -348,13 +348,45 @@ impl Agent {
threshold: f32,
retrieve_more: bool,
) -> Result
> {
+ let paths_set = paths
+ .into_iter()
+ .map(|p| parser::Literal::Plain(p.into()))
+ .collect::>();
+
+ let paths = if paths_set.is_empty() {
+ self.last_exchange().query.paths.clone()
+ } else if self.last_exchange().query.paths.is_empty() {
+ paths_set
+ } else {
+ paths_set
+ .into_iter()
+ .zip(self.last_exchange().query.paths.clone())
+ .flat_map(|(llm, user)| {
+ if llm
+ .as_plain()
+ .unwrap()
+ .starts_with(user.as_plain().unwrap().as_ref())
+ {
+ // llm-defined is more specific than user request
+ vec![llm]
+ } else if user
+ .as_plain()
+ .unwrap()
+ .starts_with(llm.as_plain().unwrap().as_ref())
+ {
+ // user-defined is more specific than llm request
+ vec![user]
+ } else {
+ vec![llm, user]
+ }
+ })
+ .collect()
+ };
+
let query = parser::SemanticQuery {
target: Some(query),
repos: [parser::Literal::Plain(self.repo_ref.display_name().into())].into(),
- paths: paths
- .iter()
- .map(|p| parser::Literal::Plain(p.into()))
- .collect(),
+ paths,
..self.last_exchange().query.clone()
};
@@ -409,12 +441,13 @@ impl Agent {
query: &str,
) -> impl Iterator- + 'a {
let branch = self.last_exchange().query.first_branch();
+ let langs = self.last_exchange().query.langs.iter().map(Deref::deref);
debug!(%self.repo_ref, query, ?branch, %self.thread_id, "executing fuzzy search");
self.app
.indexes
.file
- .fuzzy_path_match(&self.repo_ref, query, branch.as_deref(), 50)
+ .fuzzy_path_match(&self.repo_ref, query, branch.as_deref(), langs, 50)
.await
}
diff --git a/server/bleep/src/agent/exchange.rs b/server/bleep/src/agent/exchange.rs
index f8a4de3bf8..89a050c5f5 100644
--- a/server/bleep/src/agent/exchange.rs
+++ b/server/bleep/src/agent/exchange.rs
@@ -1,5 +1,5 @@
use crate::query::parser::SemanticQuery;
-use std::{fmt, mem};
+use std::fmt;
use chrono::prelude::{DateTime, Utc};
use rand::seq::SliceRandom;
@@ -102,17 +102,16 @@ impl Exchange {
///
/// This is used to reduce the size of an exchange when we send it over the wire, by removing
/// data that the front-end does not use.
- pub fn compressed(&self) -> Self {
- let mut ex = self.clone();
-
- ex.code_chunks.clear();
- ex.paths.clear();
- ex.search_steps = mem::take(&mut ex.search_steps)
+ pub fn compressed(mut self) -> Self {
+ self.code_chunks.clear();
+ self.paths.clear();
+ self.search_steps = self
+ .search_steps
.into_iter()
.map(|step| step.compressed())
.collect();
- ex
+ self
}
}
diff --git a/server/bleep/src/indexes/file.rs b/server/bleep/src/indexes/file.rs
index c60b32be9e..bbc63aea01 100644
--- a/server/bleep/src/indexes/file.rs
+++ b/server/bleep/src/indexes/file.rs
@@ -243,6 +243,7 @@ impl Indexer {
repo_ref: &RepoRef,
query_str: &str,
branch: Option<&str>,
+ langs: impl Iterator
- ,
limit: usize,
) -> impl Iterator
- + '_ {
// lifted from query::compiler
@@ -263,6 +264,14 @@ impl Indexer {
.collect::>()
})
.map(BooleanQuery::intersection);
+ let langs_query = BooleanQuery::union(
+ langs
+ .map(|l| Term::from_field_bytes(self.source.lang, l.as_bytes()))
+ .map(|t| TermQuery::new(t, IndexRecordOption::Basic))
+ .map(Box::new)
+ .map(|q| q as Box)
+ .collect::>(),
+ );
let mut hits = trigrams(query_str)
.flat_map(|s| case_permutations(s.as_str()))
.map(|token| Term::from_field_text(self.source.relative_path, token.as_str()))
@@ -273,6 +282,7 @@ impl Indexer {
repo_ref_term.clone(),
IndexRecordOption::Basic,
)),
+ Box::new(langs_query.clone()),
];
if let Some(b) = branch_term.as_ref() {
diff --git a/server/bleep/src/indexes/reader.rs b/server/bleep/src/indexes/reader.rs
index 90562deff3..b883425783 100644
--- a/server/bleep/src/indexes/reader.rs
+++ b/server/bleep/src/indexes/reader.rs
@@ -64,6 +64,7 @@ pub struct FileDocument {
pub lang: Option,
pub branches: String,
pub indexed: bool,
+ pub is_dir: bool,
}
pub struct RepoDocument {
@@ -104,7 +105,7 @@ impl DocumentRead for ContentReader {
.literal(schema.relative_path, |q| q.path.clone())
.literal(schema.repo_name, |q| q.repo.clone())
.literal(schema.branches, |q| q.branch.clone())
- .byte_string(schema.lang, |q| q.lang.as_ref())
+ .byte_string(schema.lang, |q| q.lang.as_ref().map(AsRef::as_ref))
.literal(schema.symbols, |q| {
q.target.as_ref().and_then(Target::symbol).cloned()
})
@@ -195,7 +196,7 @@ impl DocumentRead for FileReader {
.literal(schema.relative_path, |q| q.path.clone())
.literal(schema.repo_name, |q| q.repo.clone())
.literal(schema.branches, |q| q.branch.clone())
- .byte_string(schema.lang, |q| q.lang.as_ref())
+ .byte_string(schema.lang, |q| q.lang.as_ref().map(AsRef::as_ref))
.compile(queries, tantivy_index)
}
@@ -206,6 +207,7 @@ impl DocumentRead for FileReader {
let lang = read_lang_field(&doc, schema.lang);
let branches = read_text_field(&doc, schema.branches);
let indexed = read_bool_field(&doc, schema.indexed);
+ let is_dir = read_bool_field(&doc, schema.is_directory);
FileDocument {
relative_path,
@@ -214,6 +216,7 @@ impl DocumentRead for FileReader {
lang,
branches,
indexed,
+ is_dir,
}
}
}
@@ -325,7 +328,7 @@ impl DocumentRead for OpenReader {
}
_ => None,
})
- .byte_string(schema.lang, |q| q.lang.as_ref())
+ .byte_string(schema.lang, |q| q.lang.as_ref().map(AsRef::as_ref))
.compile(queries, tantivy_index)
}
diff --git a/server/bleep/src/indexes/schema.rs b/server/bleep/src/indexes/schema.rs
index 02bc3a6fce..54ac40fe96 100644
--- a/server/bleep/src/indexes/schema.rs
+++ b/server/bleep/src/indexes/schema.rs
@@ -104,7 +104,7 @@ impl File {
let raw_repo_name = builder.add_bytes_field("raw_repo_name", FAST);
let raw_relative_path = builder.add_bytes_field("raw_relative_path", FAST);
- let is_directory = builder.add_bool_field("is_directory", FAST);
+ let is_directory = builder.add_bool_field("is_directory", FAST | STORED);
let indexed = builder.add_bool_field("indexed", STORED);
Self {
diff --git a/server/bleep/src/query/execute.rs b/server/bleep/src/query/execute.rs
index f8a187c6bb..3c47aef59f 100644
--- a/server/bleep/src/query/execute.rs
+++ b/server/bleep/src/query/execute.rs
@@ -154,6 +154,7 @@ pub struct FileResultData {
lang: Option,
branches: String,
indexed: bool,
+ is_dir: bool,
}
impl FileResultData {
@@ -164,6 +165,7 @@ impl FileResultData {
lang: Option,
branches: String,
indexed: bool,
+ is_dir: bool,
) -> Self {
Self {
repo_name,
@@ -172,6 +174,7 @@ impl FileResultData {
lang,
branches,
indexed,
+ is_dir,
}
}
}
@@ -502,6 +505,7 @@ impl ExecuteQuery for FileReader {
lang: f.lang,
branches: f.branches,
indexed: f.indexed,
+ is_dir: f.is_dir,
})
})
.collect::>();
diff --git a/server/bleep/src/query/languages.rs b/server/bleep/src/query/languages.rs
index 0111c374a9..a54d4c553d 100644
--- a/server/bleep/src/query/languages.rs
+++ b/server/bleep/src/query/languages.rs
@@ -1,9 +1,9 @@
-use std::borrow::Cow;
+use std::{borrow::Cow, collections::HashSet};
include!(concat!(env!("OUT_DIR"), "/languages.rs"));
-pub fn parse_alias(lang: Cow) -> Cow {
- if let Some(s) = EXT_MAP.get(&lang) {
+pub fn parse_alias(lang: &str) -> Cow<'static, str> {
+ if let Some(s) = EXT_MAP.get(lang) {
(*s).into()
} else {
lang.to_ascii_lowercase().into()
@@ -18,6 +18,14 @@ pub fn proper_case(lower: Cow) -> Cow {
}
}
+pub fn list() -> impl Iterator
- {
+ EXT_MAP
+ .entries()
+ .flat_map(|e| [*e.0, *e.1])
+ .collect::>()
+ .into_iter()
+}
+
#[cfg(test)]
mod test {
use super::*;
diff --git a/server/bleep/src/query/parser.rs b/server/bleep/src/query/parser.rs
index 5af87ca57c..2d56f11a34 100644
--- a/server/bleep/src/query/parser.rs
+++ b/server/bleep/src/query/parser.rs
@@ -1,7 +1,7 @@
use pest::{iterators::Pair, Parser};
use regex::Regex;
use smallvec::{smallvec, SmallVec};
-use std::{borrow::Cow, collections::HashSet, mem};
+use std::{borrow::Cow, mem, ops::Deref};
#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct Query<'a> {
@@ -12,7 +12,7 @@ pub struct Query<'a> {
pub org: Option>,
pub repo: Option>,
pub path: Option>,
- pub lang: Option>,
+ pub lang: Option>,
pub branch: Option>,
pub target: Option>,
}
@@ -25,10 +25,11 @@ pub enum Target<'a> {
#[derive(Default, Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct SemanticQuery<'a> {
- pub repos: HashSet>,
- pub paths: HashSet>,
- pub langs: HashSet>,
- pub branch: HashSet>,
+ pub raw_query: String,
+ pub repos: Vec>,
+ pub paths: Vec>,
+ pub langs: Vec>,
+ pub branch: Vec>,
pub target: Option>,
}
@@ -42,7 +43,7 @@ impl<'a> SemanticQuery<'a> {
}
pub fn langs(&'a self) -> impl Iterator
- > {
- self.langs.iter().cloned()
+ self.langs.iter().filter_map(|t| t.as_plain())
}
pub fn target(&self) -> Option> {
@@ -57,31 +58,28 @@ impl<'a> SemanticQuery<'a> {
// first branch because the UX operates on a single "current" branch. We can likely update
// `SemanticQuery` to remove multiple branches altogether.
pub fn first_branch(&self) -> Option> {
- self.branch.iter().next().map(|t| t.clone().unwrap())
+ self.branch.first().map(|t| t.clone().unwrap())
}
// Ditto for repo
pub fn first_repo(&self) -> Option> {
- self.repos.iter().next().map(|t| t.clone().unwrap())
+ self.repos.first().map(|t| t.clone().unwrap())
}
pub fn from_str(query: String, repo_ref: String) -> Self {
Self {
- target: Some(Literal::Plain(Cow::Owned(query))),
- repos: [Literal::Plain(Cow::Owned(repo_ref))].into(),
+ target: Some(Literal::Plain(query.into())),
+ repos: [Literal::Plain(repo_ref.into())].into(),
..Default::default()
}
}
pub fn into_owned(self) -> SemanticQuery<'static> {
SemanticQuery {
+ raw_query: self.raw_query.clone(),
repos: self.repos.into_iter().map(Literal::into_owned).collect(),
paths: self.paths.into_iter().map(Literal::into_owned).collect(),
- langs: self
- .langs
- .into_iter()
- .map(|c| c.into_owned().into())
- .collect(),
+ langs: self.langs.into_iter().map(Literal::into_owned).collect(),
branch: self.branch.into_iter().map(Literal::into_owned).collect(),
target: self.target.map(Literal::into_owned),
}
@@ -171,6 +169,14 @@ impl<'a> Query<'a> {
}
impl<'a> Target<'a> {
+ /// Get a mutable reference to the inner literal for this target, regardless of the variant.
+ pub fn literal_mut(&'a mut self) -> &mut Literal<'a> {
+ match self {
+ Self::Symbol(lit) => lit,
+ Self::Content(lit) => lit,
+ }
+ }
+
/// Get the inner literal for this target, regardless of the variant.
pub fn literal(&self) -> &Literal<'_> {
match self {
@@ -217,10 +223,69 @@ pub enum ParseError {
MultiMode,
}
-#[derive(Debug, PartialEq, Eq, Clone, Hash, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum Literal<'a> {
- Plain(Cow<'a, str>),
- Regex(Cow<'a, str>),
+ Plain(LiteralInner<'a>),
+ Regex(LiteralInner<'a>),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
+pub struct LiteralInner<'a> {
+ start: usize,
+ end: usize,
+ content: Cow<'a, str>,
+}
+
+impl<'a> LiteralInner<'a> {
+ fn new(start: usize, end: usize, content: impl Into>) -> Self {
+ Self {
+ start,
+ end,
+ content: content.into(),
+ }
+ }
+
+ fn to_owned(&self) -> LiteralInner<'static> {
+ LiteralInner {
+ start: self.start,
+ end: self.end,
+ content: Cow::Owned(self.content.to_string()),
+ }
+ }
+}
+
+impl<'a, T: AsRef> From for LiteralInner<'a> {
+ fn from(value: T) -> Self {
+ Self {
+ start: 0,
+ end: 0,
+ content: value.as_ref().to_owned().into(),
+ }
+ }
+}
+
+impl<'a> Deref for LiteralInner<'a> {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ self.content.as_ref()
+ }
+}
+
+impl<'a> Default for LiteralInner<'a> {
+ fn default() -> Self {
+ Self {
+ start: 0,
+ end: 0,
+ content: Cow::Borrowed(""),
+ }
+ }
+}
+
+impl<'a> From> for Literal<'a> {
+ fn from(value: Cow<'a, str>) -> Self {
+ Literal::Plain(value.clone().into())
+ }
}
impl From<&String> for Literal<'static> {
@@ -229,23 +294,31 @@ impl From<&String> for Literal<'static> {
}
}
+impl From<&str> for Literal<'static> {
+ fn from(value: &str) -> Self {
+ Literal::Plain(value.to_owned().into())
+ }
+}
+
impl<'a> Default for Literal<'a> {
fn default() -> Self {
- Self::Plain(Cow::Borrowed(""))
+ Literal::Plain(Default::default())
}
}
impl<'a> Literal<'a> {
- fn join_as_regex(self, rhs: Self) -> Self {
+ /// This drops position information, as it's not intelligible after the merge
+ fn join_as_regex(self, rhs: Self) -> Literal<'static> {
let lhs = self.regex_str();
let rhs = rhs.regex_str();
- Self::Regex(Cow::Owned(format!("{lhs}\\s+{rhs}")))
+ Literal::Regex(format!("{lhs}\\s+{rhs}").into())
}
- fn join_as_plain(self, rhs: Self) -> Option {
+ /// This drops position information, as it's not intelligible after the merge
+ fn join_as_plain(self, rhs: Self) -> Option> {
let lhs = self.as_plain()?;
let rhs = rhs.as_plain()?;
- Some(Self::Plain(Cow::Owned(format!("{lhs} {rhs}"))))
+ Some(Literal::Plain(format!("{lhs} {rhs}").into()))
}
/// Convert this literal into a regex string.
@@ -255,7 +328,7 @@ impl<'a> Literal<'a> {
pub fn regex_str(&self) -> Cow<'a, str> {
match self {
Self::Plain(text) => regex::escape(text).into(),
- Self::Regex(r) => r.clone(),
+ Self::Regex(r) => r.content.clone(),
}
}
@@ -265,7 +338,7 @@ impl<'a> Literal<'a> {
pub fn as_plain(&self) -> Option> {
match self {
- Self::Plain(p) => Some(p.clone()),
+ Self::Plain(p) => Some(p.content.clone()),
Self::Regex(..) => None,
}
}
@@ -279,32 +352,70 @@ impl<'a> Literal<'a> {
pub fn unwrap(self) -> Cow<'a, str> {
match self {
- Literal::Plain(v) => v,
- Literal::Regex(v) => v,
+ Literal::Plain(v) => v.content,
+ Literal::Regex(v) => v.content,
}
}
pub fn into_owned(self) -> Literal<'static> {
match self {
- Literal::Plain(cow) => Literal::Plain(Cow::Owned(cow.into_owned())),
- Literal::Regex(cow) => Literal::Regex(Cow::Owned(cow.into_owned())),
+ Literal::Plain(cow) => Literal::Plain(cow.to_owned()),
+ Literal::Regex(cow) => Literal::Regex(cow.to_owned()),
+ }
+ }
+
+ pub fn start(&self) -> usize {
+ match self {
+ Literal::Plain(inner) => inner.start,
+ Literal::Regex(inner) => inner.start,
}
}
}
impl<'a> From> for Literal<'a> {
fn from(pair: Pair<'a, Rule>) -> Self {
+ let start = pair.as_span().start();
+ let end = pair.as_span().end();
+
match pair.as_rule() {
- Rule::unquoted_literal => Self::Plain(pair.as_str().trim().into()),
- Rule::quoted_literal => Self::Plain(unescape(pair.as_str(), '"').into()),
- Rule::single_quoted_literal => Self::Plain(unescape(pair.as_str(), '\'').into()),
- Rule::regex_quoted_literal => Self::Regex(unescape(pair.as_str(), '/').into()),
- Rule::raw_text => Self::Plain(pair.as_str().trim().into()),
+ Rule::unquoted_literal => {
+ Self::Plain(LiteralInner::new(start, end, pair.as_str().trim()))
+ }
+ Rule::quoted_literal => {
+ Self::Plain(LiteralInner::new(start, end, unescape(pair.as_str(), '"')))
+ }
+ Rule::single_quoted_literal => {
+ Self::Plain(LiteralInner::new(start, end, unescape(pair.as_str(), '\'')))
+ }
+ Rule::regex_quoted_literal => {
+ Self::Regex(LiteralInner::new(start, end, unescape(pair.as_str(), '/')))
+ }
+ Rule::raw_text => Self::Plain(LiteralInner::new(start, end, pair.as_str().trim())),
_ => unreachable!(),
}
}
}
+impl<'a, 'b: 'a> AsRef> for Literal<'b> {
+ fn as_ref(&self) -> &Cow<'a, str> {
+ match self {
+ Literal::Plain(inner) => &inner.content,
+ Literal::Regex(inner) => &inner.content,
+ }
+ }
+}
+
+impl Deref for Literal<'_> {
+ type Target = str;
+
+ fn deref(&self) -> &Self::Target {
+ match self {
+ Literal::Plain(inner) => inner.as_ref(),
+ Literal::Regex(inner) => inner.as_ref(),
+ }
+ }
+}
+
/// Unescape a string, with a specific terminating character.
///
/// Newline and tab strings (`\n` and `\t`) are replaced with the respective character. Backslashes
@@ -353,7 +464,7 @@ enum Expr<'a> {
Repo(Literal<'a>),
Symbol(Literal<'a>),
Path(Literal<'a>),
- Lang(Cow<'a, str>),
+ Lang(Literal<'a>),
Content(Literal<'a>),
Branch(Literal<'a>),
@@ -378,7 +489,7 @@ impl<'a> Expr<'a> {
Rule::symbol => Symbol(Literal::from(pair.into_inner().next().unwrap())),
Rule::org => Org(Literal::from(pair.into_inner().next().unwrap())),
Rule::branch => Branch(Literal::from(pair.into_inner().next().unwrap())),
- Rule::lang => Lang(pair.into_inner().as_str().into()),
+ Rule::lang => Lang(Literal::from(pair.into_inner().next().unwrap())),
Rule::open => {
let inner = pair.into_inner().next().unwrap();
@@ -476,49 +587,68 @@ pub fn parse(query: &str) -> Result>, ParseError> {
}
pub fn parse_nl(query: &str) -> Result, ParseError> {
+ let raw_query = query.to_string();
+ let mut target = "".to_string();
+
let pairs = PestParser::parse(Rule::nl_query, query).map_err(Box::new)?;
- let mut repos = HashSet::new();
- let mut paths = HashSet::new();
- let mut langs = HashSet::new();
- let mut branch = HashSet::new();
- let mut target: Option = None;
+ let mut repos = Vec::new();
+ let mut paths = Vec::new();
+ let mut langs = Vec::new();
+ let mut branch = Vec::new();
+
+ let mut extend_query = |q: &str| {
+ if !target.is_empty() {
+ target += " ";
+ }
+ target += q;
+ };
+
for pair in pairs {
match pair.as_rule() {
Rule::repo => {
let item = Literal::from(pair.into_inner().next().unwrap());
- let _ = repos.insert(item);
+ repos.push(item);
}
Rule::path => {
let item = Literal::from(pair.into_inner().next().unwrap());
- let _ = paths.insert(item);
+ extend_query(&item);
+ paths.push(item);
}
Rule::branch => {
let item = Literal::from(pair.into_inner().next().unwrap());
- let _ = branch.insert(item);
+ branch.push(item);
}
Rule::lang => {
- let item = super::languages::parse_alias(pair.into_inner().as_str().into());
- let _ = langs.insert(item);
+ let inner = pair.into_inner().next().unwrap();
+ let item = Literal::Plain(LiteralInner {
+ content: super::languages::parse_alias(inner.as_str()),
+ start: inner.as_span().start(),
+ end: inner.as_span().end(),
+ });
+
+ extend_query(&item);
+ langs.push(item);
}
Rule::raw_text => {
let rhs = Literal::from(pair);
- if let Some(t) = target {
- target = t.join_as_plain(rhs);
- } else {
- target = Some(rhs);
- }
+ extend_query(&rhs);
}
_ => {}
}
}
Ok(SemanticQuery {
+ raw_query,
repos,
paths,
langs,
branch,
- target,
+ target: if target.is_empty() {
+ None
+ } else {
+ Some(Literal::from(&target))
+ },
})
}
@@ -546,7 +676,7 @@ fn flatten(root: Expr<'_>) -> SmallVec<[Query<'_>; 1]> {
..Default::default()
}],
Expr::Lang(lang) => smallvec![Query {
- lang: Some(super::languages::parse_alias(lang)),
+ lang: Some(super::languages::parse_alias(&lang).into()),
..Default::default()
}],
Expr::Content(lit) => smallvec![Query {
@@ -593,7 +723,11 @@ mod tests {
assert_eq!(
parse("ParseError").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Plain("ParseError".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 0,
+ end: 10,
+ content: "ParseError".into()
+ }))),
..Query::default()
}],
);
@@ -601,10 +735,26 @@ mod tests {
assert_eq!(
parse("org:bloopai repo:enterprise-search branch:origin/main ParseError").unwrap(),
vec![Query {
- repo: Some(Literal::Plain("enterprise-search".into())),
- org: Some(Literal::Plain("bloopai".into())),
- branch: Some(Literal::Plain("origin/main".into())),
- target: Some(Target::Content(Literal::Plain("ParseError".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 17,
+ end: 34,
+ content: "enterprise-search".into()
+ })),
+ org: Some(Literal::Plain(LiteralInner {
+ start: 4,
+ end: 11,
+ content: "bloopai".into()
+ })),
+ branch: Some(Literal::Plain(LiteralInner {
+ start: 42,
+ end: 53,
+ content: "origin/main".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 54,
+ end: 64,
+ content: "ParseError".into()
+ }))),
..Query::default()
}],
);
@@ -612,9 +762,21 @@ mod tests {
assert_eq!(
parse("org:bloopai repo:enterprise-search ParseError").unwrap(),
vec![Query {
- repo: Some(Literal::Plain("enterprise-search".into())),
- org: Some(Literal::Plain("bloopai".into())),
- target: Some(Target::Content(Literal::Plain("ParseError".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 17,
+ end: 34,
+ content: "enterprise-search".into()
+ })),
+ org: Some(Literal::Plain(LiteralInner {
+ start: 4,
+ end: 11,
+ content: "bloopai".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 35,
+ end: 45,
+ content: "ParseError".into()
+ }))),
..Query::default()
}],
);
@@ -622,7 +784,11 @@ mod tests {
assert_eq!(
parse("content:ParseError").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Plain("ParseError".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 8,
+ end: 18,
+ content: "ParseError".into()
+ }))),
..Query::default()
}],
);
@@ -631,8 +797,16 @@ mod tests {
assert_eq!(
parse("path:foo.c create_foo symbol:bar").unwrap(),
vec![Query {
- path: Some(Literal::Plain("foo.c".into())),
- target: Some(Target::Symbol(Literal::Plain("bar".into()))),
+ path: Some(Literal::Plain(LiteralInner {
+ start: 5,
+ end: 10,
+ content: "foo.c".into()
+ })),
+ target: Some(Target::Symbol(Literal::Plain(LiteralInner {
+ start: 29,
+ end: 32,
+ content: "bar".into()
+ }))),
..Query::default()
}],
);
@@ -641,7 +815,11 @@ mod tests {
parse("case:ignore Parse").unwrap(),
vec![Query {
case_sensitive: Some(false),
- target: Some(Target::Content(Literal::Plain("Parse".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 12,
+ end: 17,
+ content: "Parse".into()
+ }))),
..Query::default()
}],
);
@@ -653,12 +831,24 @@ mod tests {
parse("repo:foo ParseError or repo:bar").unwrap(),
vec![
Query {
- repo: Some(Literal::Plain("foo".into())),
- target: Some(Target::Content(Literal::Plain("ParseError".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 5,
+ end: 8,
+ content: "foo".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 9,
+ end: 19,
+ content: "ParseError".into()
+ }))),
..Query::default()
},
Query {
- repo: Some(Literal::Plain("bar".into())),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 28,
+ end: 31,
+ content: "bar".into()
+ })),
..Query::default()
},
],
@@ -669,12 +859,24 @@ mod tests {
parse("repo:bar or repo:foo ParseError").unwrap(),
vec![
Query {
- repo: Some(Literal::Plain("bar".into())),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 5,
+ end: 8,
+ content: "bar".into()
+ })),
..Query::default()
},
Query {
- repo: Some(Literal::Plain("foo".into())),
- target: Some(Target::Content(Literal::Plain("ParseError".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 17,
+ end: 20,
+ content: "foo".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 21,
+ end: 31,
+ content: "ParseError".into()
+ }))),
..Query::default()
},
],
@@ -742,25 +944,65 @@ mod tests {
parse("(((repo:foo xyz) or repo:abc) (repo:fred or repo:grub) org:bloop)").unwrap(),
vec![
Query {
- repo: Some(Literal::Plain("fred".into())),
- org: Some(Literal::Plain("bloop".into())),
- target: Some(Target::Content(Literal::Plain("xyz".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 36,
+ end: 40,
+ content: "fred".into()
+ })),
+ org: Some(Literal::Plain(LiteralInner {
+ start: 59,
+ end: 64,
+ content: "bloop".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 12,
+ end: 15,
+ content: "xyz".into()
+ }))),
..Query::default()
},
Query {
- repo: Some(Literal::Plain("grub".into())),
- org: Some(Literal::Plain("bloop".into())),
- target: Some(Target::Content(Literal::Plain("xyz".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 49,
+ end: 53,
+ content: "grub".into()
+ })),
+ org: Some(Literal::Plain(LiteralInner {
+ start: 59,
+ end: 64,
+ content: "bloop".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 12,
+ end: 15,
+ content: "xyz".into()
+ }))),
..Query::default()
},
Query {
- repo: Some(Literal::Plain("fred".into())),
- org: Some(Literal::Plain("bloop".into())),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 36,
+ end: 40,
+ content: "fred".into()
+ })),
+ org: Some(Literal::Plain(LiteralInner {
+ start: 59,
+ end: 64,
+ content: "bloop".into()
+ })),
..Query::default()
},
Query {
- repo: Some(Literal::Plain("grub".into())),
- org: Some(Literal::Plain("bloop".into())),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 49,
+ end: 53,
+ content: "grub".into()
+ })),
+ org: Some(Literal::Plain(LiteralInner {
+ start: 59,
+ end: 64,
+ content: "bloop".into()
+ })),
..Query::default()
},
],
@@ -773,27 +1015,63 @@ mod tests {
parse("(repo:bloop or repo:google) Parser or repo:zoekt Parsing or (symbol:Compiler or (org:bloop repo:enterprise-search))").unwrap(),
vec![
Query {
- repo: Some(Literal::Plain("bloop".into())),
- target: Some(Target::Content(Literal::Plain("Parser".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 6,
+ end: 11,
+ content: "bloop".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 28,
+ end: 34,
+ content: "Parser".into()
+ }))),
..Query::default()
},
Query {
- repo: Some(Literal::Plain("google".into())),
- target: Some(Target::Content(Literal::Plain("Parser".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 20,
+ end: 26,
+ content: "google".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 28,
+ end: 34,
+ content: "Parser".into()
+ }))),
..Query::default()
},
Query {
- repo: Some(Literal::Plain("zoekt".into())),
- target: Some(Target::Content(Literal::Plain("Parsing".into()))),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 43,
+ end: 48,
+ content: "zoekt".into()
+ })),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 49,
+ end: 56,
+ content: "Parsing".into()
+ }))),
..Query::default()
},
Query {
- target: Some(Target::Symbol(Literal::Plain("Compiler".into()))),
+ target: Some(Target::Symbol(Literal::Plain(LiteralInner {
+ start: 68,
+ end: 76,
+ content: "Compiler".into()
+ }))),
..Query::default()
},
Query {
- repo: Some(Literal::Plain("enterprise-search".into())),
- org: Some(Literal::Plain("bloop".into())),
+ repo: Some(Literal::Plain(LiteralInner {
+ start: 96,
+ end: 113,
+ content: "enterprise-search".into()
+ })),
+ org: Some(Literal::Plain(LiteralInner {
+ start: 85,
+ end: 90,
+ content: "bloop".into()
+ })),
..Query::default()
},
],
@@ -805,7 +1083,11 @@ mod tests {
assert_eq!(
parse("path:foo/bar.js").unwrap(),
vec![Query {
- path: Some(Literal::Plain("foo/bar.js".into())),
+ path: Some(Literal::Plain(LiteralInner {
+ start: 5,
+ end: 15,
+ content: "foo/bar.js".into(),
+ })),
..Query::default()
}],
);
@@ -829,8 +1111,12 @@ mod tests {
assert_eq!(
parse("lang:Rust path:server").unwrap(),
vec![Query {
- path: Some(Literal::Plain("server".into())),
- lang: Some("rust".into()),
+ path: Some(Literal::Plain(LiteralInner {
+ start: 15,
+ end: 21,
+ content: "server".into()
+ })),
+ lang: Some(Literal::Plain("rust".into())),
..Query::default()
}],
);
@@ -842,7 +1128,11 @@ mod tests {
parse("open:true path:server/bleep/Cargo.toml").unwrap(),
vec![Query {
open: Some(true),
- path: Some(Literal::Plain("server/bleep/Cargo.toml".into())),
+ path: Some(Literal::Plain(LiteralInner {
+ start: 15,
+ end: 38,
+ content: "server/bleep/Cargo.toml".into()
+ })),
..Query::default()
}],
);
@@ -851,7 +1141,11 @@ mod tests {
parse("open:false path:server/bleep/Cargo.toml").unwrap(),
vec![Query {
open: Some(false),
- path: Some(Literal::Plain("server/bleep/Cargo.toml".into())),
+ path: Some(Literal::Plain(LiteralInner {
+ start: 16,
+ end: 39,
+ content: "server/bleep/Cargo.toml".into()
+ })),
..Query::default()
}],
);
@@ -860,7 +1154,11 @@ mod tests {
parse("path:server/bleep/Cargo.toml").unwrap(),
vec![Query {
open: None,
- path: Some(Literal::Plain("server/bleep/Cargo.toml".into())),
+ path: Some(Literal::Plain(LiteralInner {
+ start: 5,
+ end: 28,
+ content: "server/bleep/Cargo.toml".into()
+ })),
..Query::default()
}],
);
@@ -871,7 +1169,11 @@ mod tests {
assert_eq!(
parse("foo\\nbar\\tquux").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Plain("foo\\nbar\\tquux".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 0,
+ end: 14,
+ content: "foo\\nbar\\tquux".into()
+ }))),
..Query::default()
}],
);
@@ -879,9 +1181,11 @@ mod tests {
assert_eq!(
parse("/^\\b\\B\\w\\Wfoo\\d\\D$/").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Regex(
- "^\\b\\B\\w\\Wfoo\\d\\D$".into()
- ))),
+ target: Some(Target::Content(Literal::Regex(LiteralInner {
+ start: 1,
+ end: 18,
+ content: "^\\b\\B\\w\\Wfoo\\d\\D$".into()
+ }))),
..Query::default()
}],
);
@@ -893,7 +1197,11 @@ mod tests {
parse("global_regex:true foo").unwrap(),
vec![Query {
global_regex: Some(true),
- target: Some(Target::Content(Literal::Regex("foo".into()))),
+ target: Some(Target::Content(Literal::Regex(LiteralInner {
+ start: 18,
+ end: 21,
+ content: "foo".into()
+ }))),
..Query::default()
}],
);
@@ -903,7 +1211,11 @@ mod tests {
parse("global_regex:true /foo/").unwrap(),
vec![Query {
global_regex: Some(true),
- target: Some(Target::Content(Literal::Regex("foo".into()))),
+ target: Some(Target::Content(Literal::Regex(LiteralInner {
+ start: 19,
+ end: 22,
+ content: "foo".into()
+ }))),
..Query::default()
}],
);
@@ -912,7 +1224,11 @@ mod tests {
assert_eq!(
parse("foo").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Plain("foo".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 0,
+ end: 3,
+ content: "foo".into()
+ }))),
..Query::default()
}],
);
@@ -926,16 +1242,40 @@ mod tests {
vec![
Query {
global_regex: Some(true),
- org: Some(Literal::Regex("bloopai".into())),
- repo: Some(Literal::Regex("bloop".into())),
- path: Some(Literal::Regex("server".into())),
- target: Some(Target::Content(Literal::Regex("foo".into()))),
+ org: Some(Literal::Regex(LiteralInner {
+ start: 23,
+ end: 30,
+ content: "bloopai".into(),
+ })),
+ repo: Some(Literal::Regex(LiteralInner {
+ start: 36,
+ end: 41,
+ content: "bloop".into(),
+ })),
+ path: Some(Literal::Regex(LiteralInner {
+ start: 47,
+ end: 53,
+ content: "server".into(),
+ })),
+ target: Some(Target::Content(Literal::Regex(LiteralInner {
+ start: 54,
+ end: 57,
+ content: "foo".into(),
+ }))),
..Query::default()
},
Query {
global_regex: Some(true),
- repo: Some(Literal::Regex("google".into())),
- target: Some(Target::Content(Literal::Regex("bar".into()))),
+ repo: Some(Literal::Regex(LiteralInner {
+ start: 66,
+ end: 72,
+ content: "google".into(),
+ })),
+ target: Some(Target::Content(Literal::Regex(LiteralInner {
+ start: 73,
+ end: 76,
+ content: "bar".into(),
+ }))),
..Query::default()
},
],
@@ -947,12 +1287,20 @@ mod tests {
vec![
Query {
global_regex: Some(false),
- target: Some(Target::Content(Literal::Plain("foo".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 18,
+ end: 21,
+ content: "foo".into(),
+ }))),
..Query::default()
},
Query {
global_regex: Some(false),
- target: Some(Target::Content(Literal::Plain("bar".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 25,
+ end: 28,
+ content: "bar".into(),
+ }))),
..Query::default()
},
],
@@ -968,26 +1316,24 @@ mod tests {
vec![
Query {
case_sensitive: Some(false),
- target: Some(Target::Content(Literal::Plain("foo".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 0,
+ end: 3,
+ content: "foo".into()
+ }))),
..Query::default()
},
Query {
case_sensitive: Some(false),
- target: Some(Target::Content(Literal::Plain("bar".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 7,
+ end: 10,
+ content: "bar".into()
+ }))),
..Query::default()
},
],
);
-
- assert_eq!(
- parse("foo or bar case:ignore").unwrap(),
- parse("case:ignore foo or bar").unwrap(),
- );
-
- assert_eq!(
- parse("foo or bar case:ignore").unwrap(),
- parse("case:sensitive foo or bar case:ignore").unwrap(),
- );
}
#[test]
@@ -995,7 +1341,11 @@ mod tests {
assert_eq!(
parse("org").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Plain("org".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 0,
+ end: 3,
+ content: "org".into()
+ }))),
..Query::default()
},],
);
@@ -1004,11 +1354,19 @@ mod tests {
parse("org or orange").unwrap(),
vec![
Query {
- target: Some(Target::Content(Literal::Plain("org".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 0,
+ end: 3,
+ content: "org".into()
+ }))),
..Query::default()
},
Query {
- target: Some(Target::Content(Literal::Plain("orange".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 7,
+ end: 13,
+ content: "orange".into()
+ }))),
..Query::default()
},
],
@@ -1020,7 +1378,11 @@ mod tests {
assert_eq!(
parse("for").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Plain("for".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 0,
+ end: 3,
+ content: "for".into()
+ }))),
..Query::default()
},],
);
@@ -1029,11 +1391,19 @@ mod tests {
parse("for or error").unwrap(),
vec![
Query {
- target: Some(Target::Content(Literal::Plain("for".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 0,
+ end: 3,
+ content: "for".into()
+ }))),
..Query::default()
},
Query {
- target: Some(Target::Content(Literal::Plain("error".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 7,
+ end: 12,
+ content: "error".into()
+ }))),
..Query::default()
},
],
@@ -1059,9 +1429,20 @@ mod tests {
assert_eq!(
parse_nl("what is background color? lang:tsx repo:bloop").unwrap(),
SemanticQuery {
- target: Some(Literal::Plain("what is background color?".into())),
- langs: ["tsx".into()].into(),
- repos: [Literal::Plain("bloop".into())].into(),
+ raw_query: "what is background color? lang:tsx repo:bloop".to_string(),
+ target: Some(Literal::Plain("what is background color? tsx".into())),
+ langs: [Literal::Plain(LiteralInner {
+ start: 31,
+ end: 34,
+ content: "tsx".into()
+ })]
+ .into(),
+ repos: [Literal::Plain(LiteralInner {
+ start: 40,
+ end: 45,
+ content: "bloop".into()
+ })]
+ .into(),
paths: [].into(),
branch: [].into()
},
@@ -1071,26 +1452,55 @@ mod tests {
#[test]
fn nl_parse_dedup_similar_filters() {
let q = parse_nl("what is background color? lang:tsx repo:bloop repo:bloop").unwrap();
- assert_eq!(q.repos().count(), 1);
+ assert_eq!(q.repos().count(), 2);
}
#[test]
fn nl_parse_multiple_filters() {
assert_eq!(
- parse_nl("what is background color? lang:tsx lang:ts repo:bloop repo:bar path:server/bleep repo:baz")
- .unwrap(),
+ parse_nl("what is background color? lang:tsx lang:ts repo:bloop repo:bar path:server/bleep repo:baz").unwrap(),
SemanticQuery {
- target: Some(Literal::Plain("what is background color?".into())),
- langs: ["tsx".into(), "typescript".into()].into(),
+ raw_query: "what is background color? lang:tsx lang:ts repo:bloop repo:bar path:server/bleep repo:baz".to_string(),
+ target: Some(Literal::Plain("what is background color? tsx typescript server/bleep".into())),
+ langs: [
+ Literal::Plain(LiteralInner {
+ start: 31,
+ end: 34,
+ content: "tsx".into()
+ }),
+ Literal::Plain(LiteralInner {
+ start: 40,
+ end: 42,
+ content: "typescript".into()
+ })
+ ]
+ .into(),
branch: [].into(),
repos: [
- Literal::Plain("bloop".into()),
- Literal::Plain("bar".into()),
- Literal::Plain("baz".into())
+ Literal::Plain(LiteralInner {
+ start: 48,
+ end: 53,
+ content: "bloop".into(),
+ }),
+ Literal::Plain(LiteralInner {
+ start: 59,
+ end: 62,
+ content: "bar".into(),
+ }),
+ Literal::Plain(LiteralInner {
+ start: 86,
+ end: 89,
+ content: "baz".into(),
+ }),
]
.into(),
- paths: [Literal::Plain("server/bleep".into())].into(),
- }
+ paths: [Literal::Plain(LiteralInner {
+ start: 68,
+ end: 80,
+ content: "server/bleep".into(),
+ })]
+ .into(),
+ },
);
}
@@ -1098,13 +1508,26 @@ mod tests {
fn nl_consume_flags() {
assert_eq!(
parse_nl(
- "what is background color? lang:tsx repo:bloop org:bloop symbol:foo open:true"
+ "what is background color of lang:tsx files? repo:bloop org:bloop symbol:foo open:true"
)
.unwrap(),
SemanticQuery {
- target: Some(Literal::Plain("what is background color?".into())),
- langs: ["tsx".into()].into(),
- repos: [Literal::Plain("bloop".into())].into(),
+ raw_query:
+ "what is background color of lang:tsx files? repo:bloop org:bloop symbol:foo open:true"
+ .to_string(),
+ target: Some(Literal::Plain("what is background color of tsx files?".into())),
+ langs: [Literal::Plain(LiteralInner {
+ start: 33,
+ end: 36,
+ content: "tsx".into()
+ })]
+ .into(),
+ repos: [Literal::Plain(LiteralInner {
+ start: 49,
+ end: 54,
+ content: "bloop".into()
+ })]
+ .into(),
paths: [].into(),
branch: [].into(),
}
@@ -1113,10 +1536,17 @@ mod tests {
assert_eq!(
parse_nl("case:ignore why are languages excluded from ctags? branch:main").unwrap(),
SemanticQuery {
+ raw_query: "case:ignore why are languages excluded from ctags? branch:main"
+ .to_string(),
target: Some(Literal::Plain(
"why are languages excluded from ctags?".into()
)),
- branch: [Literal::Plain("main".into())].into(),
+ branch: [Literal::Plain(LiteralInner {
+ start: 58,
+ end: 62,
+ content: "main".into()
+ })]
+ .into(),
..Default::default()
}
);
@@ -1140,7 +1570,11 @@ mod tests {
assert_eq!(
parse("'foo\\'bar'").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Plain("foo'bar".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 1,
+ end: 9,
+ content: "foo'bar".into()
+ }))),
..Query::default()
}],
);
@@ -1148,7 +1582,11 @@ mod tests {
assert_eq!(
parse(r#""foo\"bar""#).unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Plain("foo\"bar".into()))),
+ target: Some(Target::Content(Literal::Plain(LiteralInner {
+ start: 1,
+ end: 9,
+ content: "foo\"bar".into()
+ }))),
..Query::default()
}],
);
@@ -1156,7 +1594,11 @@ mod tests {
assert_eq!(
parse("/foo\\/bar/").unwrap(),
vec![Query {
- target: Some(Target::Content(Literal::Regex("foo/bar".into()))),
+ target: Some(Target::Content(Literal::Regex(LiteralInner {
+ start: 1,
+ end: 9,
+ content: "foo/bar".into()
+ }))),
..Query::default()
}],
);
diff --git a/server/bleep/src/webserver/answer.rs b/server/bleep/src/webserver/answer.rs
index 8a4ee4f65e..5b7b343cdf 100644
--- a/server/bleep/src/webserver/answer.rs
+++ b/server/bleep/src/webserver/answer.rs
@@ -407,9 +407,7 @@ pub async fn explain(
.into_owned();
if let Some(branch) = params.branch {
- query
- .branch
- .insert(Literal::Plain(std::borrow::Cow::Owned(branch)));
+ query.branch.push(Literal::Plain(branch.into()));
}
let file_content = app
diff --git a/server/bleep/src/webserver/autocomplete.rs b/server/bleep/src/webserver/autocomplete.rs
index 16561a3eb4..b6afd07085 100644
--- a/server/bleep/src/webserver/autocomplete.rs
+++ b/server/bleep/src/webserver/autocomplete.rs
@@ -1,4 +1,4 @@
-use std::sync::Arc;
+use std::{collections::HashMap, sync::Arc};
use super::prelude::*;
use crate::{
@@ -8,7 +8,7 @@ use crate::{
},
query::{
execute::{ApiQuery, ExecuteQuery, QueryResult},
- parser,
+ languages, parser,
parser::{Literal, Target},
},
};
@@ -17,15 +17,88 @@ use axum::{extract::Query, response::IntoResponse as IntoAxumResponse, Extension
use futures::{stream, StreamExt, TryStreamExt};
use serde::Serialize;
+fn default_true() -> bool {
+ true
+}
+
+#[derive(Deserialize)]
+pub struct AutocompleteParams {
+ #[serde(default = "default_true")]
+ content: bool,
+ #[serde(default = "default_true")]
+ file: bool,
+ #[serde(default = "default_true")]
+ repo: bool,
+ #[serde(default = "default_true")]
+ lang: bool,
+}
+
pub(super) async fn handle(
Query(mut api_params): Query,
+ Query(ac_params): Query,
Extension(indexes): Extension>,
) -> Result {
// Override page_size and set to low value
api_params.page = 0;
- api_params.page_size = 3;
-
- let queries = parser::parse(&api_params.q).map_err(Error::user)?;
+ api_params.page_size = 8;
+
+ let mut partial_lang = None;
+ let mut has_target = false;
+
+ let queries = parser::parse(&api_params.q)
+ .map_err(Error::user)?
+ .into_iter()
+ .map(|mut q| {
+ let keywords = &["lang:", "path:", "repo:"];
+
+ if ac_params.content {
+ if let Some(ref t) = q.target {
+ if !keywords.iter().any(|k| k == t.literal().as_ref()) {
+ has_target = true;
+ }
+ }
+
+ let target = q
+ .target
+ .get_or_insert_with(|| Target::Content(Literal::Regex(".*".into())));
+
+ for keyword in keywords {
+ if let Some(pos) = target.literal().find(keyword) {
+ let new = format!(
+ "{}{}",
+ &target.literal()[..pos],
+ &target.literal()[pos + keyword.len()..]
+ );
+
+ *target = Target::Content(Literal::Regex(if new.is_empty() {
+ ".*".into()
+ } else {
+ new.into()
+ }));
+ }
+ }
+ } else {
+ q.target = None;
+ }
+
+ if let Some(lang) = q.lang.as_ref() {
+ partial_lang = q.lang.as_ref().map(|l| l.to_lowercase());
+ if languages::list()
+ .filter(|l| l.to_lowercase() == lang.as_ref().to_lowercase())
+ .count()
+ == 0
+ {
+ q.lang = None;
+ }
+ }
+
+ if q.path.is_none() && ac_params.file {
+ q.path = Some(Literal::Regex(".*".into()));
+ }
+
+ q
+ })
+ .collect::>();
let mut autocomplete_results = vec![];
// Only execute prefix search on flag names if there is a non-regex content target.
@@ -41,44 +114,74 @@ pub(super) async fn handle(
);
}
- // Bypass the parser and execute a prefix search using the last whitespace-split token
- // in the query string.
- //
- // This should be revisited when we implement cursor-aware autocomplete.
- //
- // `api lang:p` -> search lang list with prefix `p`
- // `lang:p api` -> lang prefix search not triggered
- if let Some(matched_langs) = complete_lang(&api_params.q) {
- autocomplete_results.append(
- &mut matched_langs
- .map(|l| QueryResult::Lang(l.to_string()))
- .collect(),
- );
+ let mut engines = vec![];
+ if ac_params.content {
+ engines.push(ContentReader.execute(&indexes.file, &queries, &api_params));
}
- // If no flags completion, run a search with full query
- if autocomplete_results.is_empty() {
- let contents = ContentReader.execute(&indexes.file, &queries, &api_params);
- let repos = RepoReader.execute(&indexes.repo, &queries, &api_params);
- let files = FileReader.execute(&indexes.file, &queries, &api_params);
-
- autocomplete_results = stream::iter([contents, repos, files])
- // Buffer several readers at the same time. The exact number is not important; this is
- // simply an upper bound.
- .buffered(10)
- .try_fold(Vec::new(), |mut a, e| async {
- a.extend(e.data.into_iter());
- Ok(a)
- })
- .await
- .map_err(Error::internal)?;
+ if ac_params.repo {
+ engines.push(RepoReader.execute(&indexes.repo, &queries, &api_params));
}
- let count = autocomplete_results.len();
- let data = autocomplete_results;
- let response = AutocompleteResponse { count, data };
+ if ac_params.file {
+ engines.push(FileReader.execute(&indexes.file, &queries, &api_params));
+ }
- Ok(json(response))
+ let (langs, list) = stream::iter(engines)
+ // Buffer several readers at the same time. The exact number is not important; this is
+ // simply an upper bound.
+ .buffered(10)
+ .try_fold(
+ (HashMap::::new(), Vec::new()),
+ |(mut langs, mut list), e| async {
+ for (lang, count) in e.stats.lang {
+ // The exact number here isn't relevant, and
+ // this may be off.
+ //
+ // We're trying to scale the results compared
+ // to each other which means this will still
+ // serve the purpose for ranking.
+ *langs.entry(lang).or_default() += count;
+ }
+ list.extend(e.data.into_iter());
+ Ok((langs, list))
+ },
+ )
+ .await
+ .map_err(Error::internal)?;
+
+ autocomplete_results.extend(
+ list.into_iter()
+ .filter(|q| has_target || !matches!(q, QueryResult::Snippets(_))),
+ );
+
+ if ac_params.lang && api_params.q.contains("lang:") {
+ let mut ranked_langs = langs.into_iter().collect::>();
+ if let Some(partial) = partial_lang {
+ ranked_langs.retain(|(l, _)| l.to_lowercase().contains(&partial));
+
+ if ranked_langs.is_empty() {
+ ranked_langs.extend(
+ languages::list()
+ .filter(|l| l.to_lowercase().starts_with(&partial))
+ .map(|l| (l.to_lowercase(), 0)),
+ );
+
+ ranked_langs.sort_by(|(a, _), (b, _)| a.len().cmp(&b.len()));
+ ranked_langs.truncate(5);
+ }
+ }
+
+ ranked_langs.sort_by(|(_, a_count), (_, b_count)| b_count.cmp(a_count));
+ ranked_langs.truncate(5);
+
+ autocomplete_results.extend(ranked_langs.into_iter().map(|(l, _)| QueryResult::Lang(l)));
+ }
+
+ Ok(json(AutocompleteResponse {
+ count: autocomplete_results.len(),
+ data: autocomplete_results,
+ }))
}
fn complete_flag(q: &str) -> impl Iterator
- + '_ {
@@ -88,18 +191,6 @@ fn complete_flag(q: &str) -> impl Iterator
- + '_ {
.copied()
}
-fn complete_lang(q: &str) -> Option + '_> {
- match q.split_whitespace().last() {
- Some(last) => last.strip_prefix("lang:").map(|prefix| {
- COMMON_LANGUAGES
- .iter()
- .filter(move |l| l.starts_with(prefix))
- .copied()
- }),
- _ => None,
- }
-}
-
#[derive(Serialize)]
pub(super) struct AutocompleteResponse {
count: usize,
@@ -111,73 +202,3 @@ impl super::ApiResponse for AutocompleteResponse {}
const QUERY_FLAGS: &[&str; 8] = &[
"repo", "path", "content", "symbol", "lang", "case", "or", "open",
];
-
-// List of common languages
-const COMMON_LANGUAGES: &[&str] = &[
- "webassembly",
- "basic",
- "makefile",
- "groovy",
- "haskell",
- "idris",
- "typescript",
- "r",
- "javascript",
- "llvm",
- "jsonnet",
- "lua",
- "awk",
- "solidity",
- "nim",
- "hcl",
- "julia",
- "ada",
- "verilog",
- "python",
- "go",
- "sql",
- "plsql",
- "fortran",
- "erlang",
- "mathematica",
- "rust",
- "coffeescript",
- "zig",
- "scala",
- "tsx",
- "ruby",
- "apl",
- "c",
- "tcl",
- "kotlin",
- "vba",
- "matlab",
- "hack",
- "ocaml",
- "prolog",
- "scheme",
- "dockerfile",
- "assembly",
- "clojure",
- "shell",
- "java",
- "c++",
- "php",
- "perl",
- "vbscript",
- "d",
- "pascal",
- "elm",
- "swift",
- "cuda",
- "dart",
- "elixir",
- "c#",
- "objective-c",
- "coq",
- "forth",
- "cmake",
- "nix",
- "objective-c++",
- "actionscript",
-];
diff --git a/server/bleep/src/webserver/search.rs b/server/bleep/src/webserver/search.rs
index ce143e15dd..d49a2b37c8 100644
--- a/server/bleep/src/webserver/search.rs
+++ b/server/bleep/src/webserver/search.rs
@@ -63,6 +63,7 @@ pub(super) async fn fuzzy_path(
c.lang,
c.branches,
c.indexed,
+ c.is_dir,
))
})
.collect::>();