mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
# Backport This will backport the following commits from `main` to `8.x`: - [[ES|QL] Comment parsing and pretty-printing (#192173)](https://github.com/elastic/kibana/pull/192173) <!--- Backport version: 9.4.3 --> ### Questions ? Please refer to the [Backport tool documentation](https://github.com/sqren/backport) <!--BACKPORT [{"author":{"name":"Vadim Kibana","email":"82822460+vadimkibana@users.noreply.github.com"},"sourceCommit":{"committedDate":"2024-09-26T10:34:38Z","message":"[ES|QL] Comment parsing and pretty-printing (#192173)\n\n## Summary\r\n\r\nTL;DR\r\n\r\n- Adds ability to parse out comments from source to AST.\r\n- Adds ability for every AST node to have *decoration*—comments,\r\nwhich can be attached from left, top, and right from the node.\r\n- Implements routine which attached comments to AST nodes.\r\n- In `BasicPrettyPrinter` adds support only for *left* and *right*\r\ncomment printing, as the basic printer prints only on one line.\r\n- In `WrappingPrettyPrinter` adds support for all comment printing for\r\nall AST nodes.\r\n- Introduces a `Query` object and `query` AST node, which represent\r\nthole query—the root node, list of commands.\r\n- The ES|QL AST example plugin now displays the pretty-printed text\r\nversion.\r\n\r\n\r\n### Comments\r\n\r\nThis PR introduced an optional `formatting` field for all AST nodes. In\r\nthe `formatting` field one can specify comment decorations from\r\ndifferent sides of a node.\r\n\r\nWhen parsing, once can now specify the `{ withComments: true }` option,\r\nwhich will collect all comments from the source while parsing using the\r\n`collectDecorations` routine. It will then also call the\r\n`attachDecorations`, which walks the AST and assigns each comment to\r\nsome AST node.\r\n\r\nFurther, traversal and pretty-print API have been updated to work with\r\ncomments:\r\n\r\n- The `Walker` has been updated to be able to walk all comments from the\r\nAST.\r\n- The `BasicPrettyPrinter` adds support only for *left* and *right*\r\ninline comment printing, as the basic printer prints only on one line.\r\n- The `WrappingPrettyPrinter` adds support for all comment printing for\r\nall AST nodes. It switches to line-break printing mode if it detects\r\nthere are comments with line breaks (those could be multi-line comments,\r\nor single line comments—single line comments are always followed\r\nby a line break). It also correctly inserts punctuation, when an AST\r\nnode is surrounded by comments.\r\n\r\n\r\n### Parsing utils\r\n\r\nAll parsing utils have been moved to the `/parser` sub-folder.\r\n\r\nFiles in the `/parser` folder have been renamed as per Kibana convention\r\nto reflect what is inside the file. For example, the `EsqlErrorListener`\r\nclass is in a file named `esql_error_listener.ts`.\r\n\r\nA `Query` class and `ESQLAstQueryExpression` AST nodes have been\r\nintroduced. They represent the result of a full query parse. (Before\r\nthat, the AST root was just an array of command nodes, now the AST root\r\nis represented by the `ESQLAstQueryExpression` node.)\r\n\r\n\r\n### Builder\r\n\r\nI have started the implementation of the `Builder` static class in the\r\n`/builder` folder. It is simply a collection of stateless AST node\r\nfactories—functions which construct AST nodes.\r\n\r\nSome of the `Builder` methods are already used by the parser, more will\r\nfollow. 
We will also use the `Builder` in upcoming [*Mutation\r\nAPI*](https://github.com/elastic/kibana/issues/191812).\r\n\r\n\r\n### ES|QL Example Plugin\r\n\r\nThis PR sets up Storybook and implements few Storybook stories for the\r\nES|QL AST example plugin, run it with:\r\n\r\n```\r\nyarn storybook esql_ast_inspector\r\n```\r\n\r\nThis PR updates the *ES|QL AST Explorer* example plugin. Start Kibana\r\nwith example plugins enabled:\r\n\r\n```\r\nyarn start --run-examples\r\n```\r\n\r\nAnd navigate to\r\n[`/app/esql_ast_inspector`](http://localhost:5601/app/esql_ast_inspector)\r\nto see the new example plugin UI.\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Checklist\r\n\r\n- [x] [Unit or functional\r\ntests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)\r\nwere updated or added to match the most common scenarios\r\n\r\n\r\n### For maintainers\r\n\r\n- [x] This was checked for breaking API changes and was [labeled\r\nappropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>\r\nCo-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>\r\nCo-authored-by: Stratoula Kalafateli <efstratia.kalafateli@elastic.co>","sha":"2217337c5d91340ba67e0bedaab0762502518993","branchLabelMapping":{"^v9.0.0$":"main","^v8.16.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["review","release_note:skip","v9.0.0","backport:prev-minor","Feature:ES|QL","Team:ESQL","v8.16.0"],"title":"[ES|QL] Comment parsing and pretty-printing","number":192173,"url":"https://github.com/elastic/kibana/pull/192173","mergeCommit":{"message":"[ES|QL] Comment parsing and pretty-printing (#192173)\n\n## Summary\r\n\r\nTL;DR\r\n\r\n- Adds ability to parse out comments from source to AST.\r\n- Adds ability for every AST node to have *decoration*—comments,\r\nwhich can be attached from left, top, and right from the node.\r\n- Implements routine which attached comments to AST nodes.\r\n- In `BasicPrettyPrinter` adds support only for *left* and *right*\r\ncomment printing, as the basic printer prints only on one line.\r\n- In `WrappingPrettyPrinter` adds support for all comment printing for\r\nall AST nodes.\r\n- Introduces a `Query` object and `query` AST node, which represent\r\nthole query—the root node, list of commands.\r\n- The ES|QL AST example plugin now displays the pretty-printed text\r\nversion.\r\n\r\n\r\n### Comments\r\n\r\nThis PR introduced an optional `formatting` field for all AST nodes. In\r\nthe `formatting` field one can specify comment decorations from\r\ndifferent sides of a node.\r\n\r\nWhen parsing, once can now specify the `{ withComments: true }` option,\r\nwhich will collect all comments from the source while parsing using the\r\n`collectDecorations` routine. It will then also call the\r\n`attachDecorations`, which walks the AST and assigns each comment to\r\nsome AST node.\r\n\r\nFurther, traversal and pretty-print API have been updated to work with\r\ncomments:\r\n\r\n- The `Walker` has been updated to be able to walk all comments from the\r\nAST.\r\n- The `BasicPrettyPrinter` adds support only for *left* and *right*\r\ninline comment printing, as the basic printer prints only on one line.\r\n- The `WrappingPrettyPrinter` adds support for all comment printing for\r\nall AST nodes. 
It switches to line-break printing mode if it detects\r\nthere are comments with line breaks (those could be multi-line comments,\r\nor single line comments—single line comments are always followed\r\nby a line break). It also correctly inserts punctuation, when an AST\r\nnode is surrounded by comments.\r\n\r\n\r\n### Parsing utils\r\n\r\nAll parsing utils have been moved to the `/parser` sub-folder.\r\n\r\nFiles in the `/parser` folder have been renamed as per Kibana convention\r\nto reflect what is inside the file. For example, the `EsqlErrorListener`\r\nclass is in a file named `esql_error_listener.ts`.\r\n\r\nA `Query` class and `ESQLAstQueryExpression` AST nodes have been\r\nintroduced. They represent the result of a full query parse. (Before\r\nthat, the AST root was just an array of command nodes, now the AST root\r\nis represented by the `ESQLAstQueryExpression` node.)\r\n\r\n\r\n### Builder\r\n\r\nI have started the implementation of the `Builder` static class in the\r\n`/builder` folder. It is simply a collection of stateless AST node\r\nfactories—functions which construct AST nodes.\r\n\r\nSome of the `Builder` methods are already used by the parser, more will\r\nfollow. We will also use the `Builder` in upcoming [*Mutation\r\nAPI*](https://github.com/elastic/kibana/issues/191812).\r\n\r\n\r\n### ES|QL Example Plugin\r\n\r\nThis PR sets up Storybook and implements few Storybook stories for the\r\nES|QL AST example plugin, run it with:\r\n\r\n```\r\nyarn storybook esql_ast_inspector\r\n```\r\n\r\nThis PR updates the *ES|QL AST Explorer* example plugin. Start Kibana\r\nwith example plugins enabled:\r\n\r\n```\r\nyarn start --run-examples\r\n```\r\n\r\nAnd navigate to\r\n[`/app/esql_ast_inspector`](http://localhost:5601/app/esql_ast_inspector)\r\nto see the new example plugin UI.\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Checklist\r\n\r\n- [x] [Unit or functional\r\ntests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)\r\nwere updated or added to match the most common scenarios\r\n\r\n\r\n### For maintainers\r\n\r\n- [x] This was checked for breaking API changes and was [labeled\r\nappropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>\r\nCo-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>\r\nCo-authored-by: Stratoula Kalafateli <efstratia.kalafateli@elastic.co>","sha":"2217337c5d91340ba67e0bedaab0762502518993"}},"sourceBranch":"main","suggestedTargetBranches":["8.x"],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","branchLabelMappingKey":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/192173","number":192173,"mergeCommit":{"message":"[ES|QL] Comment parsing and pretty-printing (#192173)\n\n## Summary\r\n\r\nTL;DR\r\n\r\n- Adds ability to parse out comments from source to AST.\r\n- Adds ability for every AST node to have *decoration*—comments,\r\nwhich can be attached from left, top, and right from the node.\r\n- Implements routine which attached comments to AST nodes.\r\n- In `BasicPrettyPrinter` adds support only for *left* and *right*\r\ncomment printing, as the basic printer prints only on one line.\r\n- In `WrappingPrettyPrinter` adds support for all comment printing for\r\nall AST nodes.\r\n- Introduces a `Query` object and `query` AST node, which represent\r\nthole query—the root node, list of commands.\r\n- The ES|QL AST 
example plugin now displays the pretty-printed text\r\nversion.\r\n\r\n\r\n### Comments\r\n\r\nThis PR introduced an optional `formatting` field for all AST nodes. In\r\nthe `formatting` field one can specify comment decorations from\r\ndifferent sides of a node.\r\n\r\nWhen parsing, once can now specify the `{ withComments: true }` option,\r\nwhich will collect all comments from the source while parsing using the\r\n`collectDecorations` routine. It will then also call the\r\n`attachDecorations`, which walks the AST and assigns each comment to\r\nsome AST node.\r\n\r\nFurther, traversal and pretty-print API have been updated to work with\r\ncomments:\r\n\r\n- The `Walker` has been updated to be able to walk all comments from the\r\nAST.\r\n- The `BasicPrettyPrinter` adds support only for *left* and *right*\r\ninline comment printing, as the basic printer prints only on one line.\r\n- The `WrappingPrettyPrinter` adds support for all comment printing for\r\nall AST nodes. It switches to line-break printing mode if it detects\r\nthere are comments with line breaks (those could be multi-line comments,\r\nor single line comments—single line comments are always followed\r\nby a line break). It also correctly inserts punctuation, when an AST\r\nnode is surrounded by comments.\r\n\r\n\r\n### Parsing utils\r\n\r\nAll parsing utils have been moved to the `/parser` sub-folder.\r\n\r\nFiles in the `/parser` folder have been renamed as per Kibana convention\r\nto reflect what is inside the file. For example, the `EsqlErrorListener`\r\nclass is in a file named `esql_error_listener.ts`.\r\n\r\nA `Query` class and `ESQLAstQueryExpression` AST nodes have been\r\nintroduced. They represent the result of a full query parse. (Before\r\nthat, the AST root was just an array of command nodes, now the AST root\r\nis represented by the `ESQLAstQueryExpression` node.)\r\n\r\n\r\n### Builder\r\n\r\nI have started the implementation of the `Builder` static class in the\r\n`/builder` folder. It is simply a collection of stateless AST node\r\nfactories—functions which construct AST nodes.\r\n\r\nSome of the `Builder` methods are already used by the parser, more will\r\nfollow. We will also use the `Builder` in upcoming [*Mutation\r\nAPI*](https://github.com/elastic/kibana/issues/191812).\r\n\r\n\r\n### ES|QL Example Plugin\r\n\r\nThis PR sets up Storybook and implements few Storybook stories for the\r\nES|QL AST example plugin, run it with:\r\n\r\n```\r\nyarn storybook esql_ast_inspector\r\n```\r\n\r\nThis PR updates the *ES|QL AST Explorer* example plugin. 
Start Kibana\r\nwith example plugins enabled:\r\n\r\n```\r\nyarn start --run-examples\r\n```\r\n\r\nAnd navigate to\r\n[`/app/esql_ast_inspector`](http://localhost:5601/app/esql_ast_inspector)\r\nto see the new example plugin UI.\r\n\r\n\r\n\r\n\r\n\r\n\r\n### Checklist\r\n\r\n- [x] [Unit or functional\r\ntests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)\r\nwere updated or added to match the most common scenarios\r\n\r\n\r\n### For maintainers\r\n\r\n- [x] This was checked for breaking API changes and was [labeled\r\nappropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>\r\nCo-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>\r\nCo-authored-by: Stratoula Kalafateli <efstratia.kalafateli@elastic.co>","sha":"2217337c5d91340ba67e0bedaab0762502518993"}},{"branch":"8.x","label":"v8.16.0","branchLabelMappingKey":"^v8.16.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}] BACKPORT--> Co-authored-by: Vadim Kibana <82822460+vadimkibana@users.noreply.github.com>
This commit is contained in:
parent
ba9a67ef96
commit
2fd6817a75
87 changed files with 5329 additions and 606 deletions
10
examples/esql_ast_inspector/.storybook/main.js
Normal file
10
examples/esql_ast_inspector/.storybook/main.js
Normal file
|
@ -0,0 +1,10 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
module.exports = require('@kbn/storybook').defaultConfig;
|
|
@ -7,84 +7,21 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import React, { useRef, useState } from 'react';
|
||||
import {
|
||||
EuiPage,
|
||||
EuiPageBody,
|
||||
EuiPageSection,
|
||||
EuiPageHeader,
|
||||
EuiSpacer,
|
||||
EuiForm,
|
||||
EuiTextArea,
|
||||
EuiFormRow,
|
||||
EuiButton,
|
||||
} from '@elastic/eui';
|
||||
import * as React from 'react';
|
||||
import { EuiPage, EuiPageBody, EuiPageSection, EuiPageHeader, EuiSpacer } from '@elastic/eui';
|
||||
import { EuiProvider } from '@elastic/eui';
|
||||
import { EsqlInspector } from './components/esql_inspector';
|
||||
|
||||
import type { CoreStart } from '@kbn/core/public';
|
||||
|
||||
import { EditorError, ESQLAst, getAstAndSyntaxErrors } from '@kbn/esql-ast';
|
||||
import { CodeEditor } from '@kbn/code-editor';
|
||||
import type { StartDependencies } from './plugin';
|
||||
|
||||
export const App = (props: { core: CoreStart; plugins: StartDependencies }) => {
|
||||
const [currentErrors, setErrors] = useState<EditorError[]>([]);
|
||||
const [currentQuery, setQuery] = useState(
|
||||
'from index1 | eval var0 = round(numberField, 2) | stats by stringField'
|
||||
);
|
||||
|
||||
const inputRef = useRef<HTMLTextAreaElement | null>(null);
|
||||
|
||||
const [ast, setAST] = useState<ESQLAst>(getAstAndSyntaxErrors(currentQuery).ast);
|
||||
|
||||
const parseQuery = (query: string) => {
|
||||
const { ast: _ast, errors } = getAstAndSyntaxErrors(query);
|
||||
setErrors(errors);
|
||||
setAST(_ast);
|
||||
};
|
||||
|
||||
export const App = () => {
|
||||
return (
|
||||
<EuiProvider>
|
||||
<EuiPage>
|
||||
<EuiPageBody style={{ maxWidth: 800, margin: '0 auto' }}>
|
||||
<EuiPageBody style={{ maxWidth: 1200, margin: '0 auto' }}>
|
||||
<EuiPageHeader paddingSize="s" bottomBorder={true} pageTitle="ES|QL AST Inspector" />
|
||||
<EuiPageSection paddingSize="s">
|
||||
<p>This app gives you the AST for a particular ES|QL query.</p>
|
||||
|
||||
<EuiSpacer />
|
||||
|
||||
<EuiForm>
|
||||
<EuiFormRow
|
||||
fullWidth
|
||||
label="Query"
|
||||
isInvalid={Boolean(currentErrors.length)}
|
||||
error={currentErrors.map((error) => error.message)}
|
||||
>
|
||||
<EuiTextArea
|
||||
inputRef={(node) => {
|
||||
inputRef.current = node;
|
||||
}}
|
||||
isInvalid={Boolean(currentErrors.length)}
|
||||
fullWidth
|
||||
value={currentQuery}
|
||||
onChange={(e) => setQuery(e.target.value)}
|
||||
css={{
|
||||
height: '5em',
|
||||
}}
|
||||
/>
|
||||
</EuiFormRow>
|
||||
<EuiFormRow fullWidth>
|
||||
<EuiButton fullWidth onClick={() => parseQuery(inputRef.current?.value ?? '')}>
|
||||
Parse
|
||||
</EuiButton>
|
||||
</EuiFormRow>
|
||||
</EuiForm>
|
||||
<EuiSpacer />
|
||||
<CodeEditor
|
||||
allowFullScreen={true}
|
||||
languageId={'json'}
|
||||
value={JSON.stringify(ast, null, 2)}
|
||||
/>
|
||||
<EsqlInspector />
|
||||
</EuiPageSection>
|
||||
</EuiPageBody>
|
||||
</EuiPage>
|
||||
|
|
|
@ -0,0 +1,28 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { Annotations } from './annotations';
|
||||
|
||||
export default {
|
||||
title: '<Annotations>',
|
||||
parameters: {},
|
||||
};
|
||||
|
||||
export const Default = () => (
|
||||
<Annotations
|
||||
value={'FROM index | LIMIT 10 | SORT some_field'}
|
||||
annotations={[
|
||||
[0, 4, (text) => <span style={{ color: 'red' }}>{text}</span>],
|
||||
[5, 10, (text) => <span style={{ color: 'blue' }}>{text}</span>],
|
||||
[13, 18, (text) => <span style={{ color: 'red' }}>{text}</span>],
|
||||
[19, 21, (text) => <span style={{ color: 'green' }}>{text}</span>],
|
||||
]}
|
||||
/>
|
||||
);
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import type { Annotation } from './types';
|
||||
|
||||
export interface AnnotationsProps {
|
||||
value: string;
|
||||
annotations?: Annotation[];
|
||||
}
|
||||
|
||||
export const Annotations: React.FC<AnnotationsProps> = (props) => {
|
||||
const { value, annotations = [] } = props;
|
||||
const annotationNodes: React.ReactNode[] = [];
|
||||
|
||||
let pos = 0;
|
||||
|
||||
for (const [start, end, render] of annotations) {
|
||||
if (start > pos) {
|
||||
const text = value.slice(pos, start);
|
||||
|
||||
annotationNodes.push(<span>{text}</span>);
|
||||
}
|
||||
|
||||
const text = value.slice(start, end);
|
||||
|
||||
pos = end;
|
||||
annotationNodes.push(render(text));
|
||||
}
|
||||
|
||||
if (pos < value.length) {
|
||||
const text = value.slice(pos);
|
||||
annotationNodes.push(<span>{text}</span>);
|
||||
}
|
||||
|
||||
return React.createElement('span', {}, ...annotationNodes);
|
||||
};
|
|
@ -0,0 +1,11 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
export { Annotations, type AnnotationsProps } from './annotations';
|
||||
export type { Annotation } from './types';
|
|
@ -7,19 +7,10 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import type { Token } from 'antlr4';
|
||||
import * as React from 'react';
|
||||
|
||||
export function getPosition(
|
||||
token: Pick<Token, 'start' | 'stop'> | null,
|
||||
lastToken?: Pick<Token, 'stop'> | undefined
|
||||
) {
|
||||
if (!token || token.start < 0) {
|
||||
return { min: 0, max: 0 };
|
||||
}
|
||||
const endFirstToken = token.stop > -1 ? Math.max(token.stop + 1, token.start) : undefined;
|
||||
const endLastToken = lastToken?.stop;
|
||||
return {
|
||||
min: token.start,
|
||||
max: endLastToken ?? endFirstToken ?? Infinity,
|
||||
};
|
||||
}
|
||||
export type Annotation = [
|
||||
start: number,
|
||||
end: number,
|
||||
annotation: (text: string) => React.ReactNode
|
||||
];
|
|
@ -0,0 +1,91 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { css } from '@emotion/react';
|
||||
import { Annotations, type Annotation } from '../annotations';
|
||||
import { FlexibleInput } from '../flexible_input/flexible_input';
|
||||
|
||||
const blockCss = css({
|
||||
display: 'inline-block',
|
||||
position: 'relative',
|
||||
width: '100%',
|
||||
fontSize: '18px',
|
||||
lineHeight: '1.3',
|
||||
fontFamily:
|
||||
"'SF Mono', SFMono-Regular, ui-monospace, 'DejaVu Sans Mono', Menlo, Consolas, monospace",
|
||||
});
|
||||
|
||||
const backdropCss = css({
|
||||
display: 'inline-block',
|
||||
position: 'absolute',
|
||||
left: 0,
|
||||
width: '100%',
|
||||
pointerEvents: 'all',
|
||||
userSelect: 'none',
|
||||
whiteSpace: 'pre',
|
||||
color: 'rgba(255, 255, 255, 0.01)',
|
||||
});
|
||||
|
||||
const inputCss = css({
|
||||
display: 'inline-block',
|
||||
color: 'rgba(255, 255, 255, 0.01)',
|
||||
caretColor: '#07f',
|
||||
});
|
||||
|
||||
const overlayCss = css({
|
||||
display: 'inline-block',
|
||||
position: 'absolute',
|
||||
left: 0,
|
||||
width: '100%',
|
||||
pointerEvents: 'none',
|
||||
userSelect: 'none',
|
||||
whiteSpace: 'pre',
|
||||
});
|
||||
|
||||
export interface EsqlEditorProps {
|
||||
src: string;
|
||||
backdrops?: Annotation[][];
|
||||
highlight?: Annotation[];
|
||||
onChange: (src: string) => void;
|
||||
}
|
||||
|
||||
export const EsqlEditor: React.FC<EsqlEditorProps> = (props) => {
|
||||
const { src, highlight, onChange } = props;
|
||||
|
||||
const backdrops: React.ReactNode[] = [];
|
||||
|
||||
if (props.backdrops) {
|
||||
for (let i = 0; i < props.backdrops.length; i++) {
|
||||
const backdrop = props.backdrops[i];
|
||||
|
||||
backdrops.push(
|
||||
<div key={i} css={backdropCss}>
|
||||
<Annotations value={src} annotations={backdrop} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const overlay = !!highlight && (
|
||||
<div css={overlayCss}>
|
||||
<Annotations value={src} annotations={highlight} />
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div css={blockCss}>
|
||||
{backdrops}
|
||||
<div css={inputCss}>
|
||||
<FlexibleInput multiline value={src} onChange={(e) => onChange(e.target.value)} />
|
||||
</div>
|
||||
{overlay}
|
||||
</div>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,139 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EuiButton, EuiPanel, EuiSpacer } from '@elastic/eui';
|
||||
import { Walker } from '@kbn/esql-ast';
|
||||
import { EsqlEditor } from '../../../esql_editor/esql_editor';
|
||||
import { useEsqlInspector } from '../../context';
|
||||
import { useBehaviorSubject } from '../../../../hooks/use_behavior_subject';
|
||||
import { Annotation } from '../../../annotations';
|
||||
|
||||
export const Editor: React.FC = () => {
|
||||
const state = useEsqlInspector();
|
||||
const src = useBehaviorSubject(state.src$);
|
||||
const highlight = useBehaviorSubject(state.highlight$);
|
||||
const focusedNode = useBehaviorSubject(state.focusedNode$);
|
||||
const limit = useBehaviorSubject(state.limit$);
|
||||
|
||||
const targetsBackdrop: Annotation[] = [];
|
||||
const focusBackdrop: Annotation[] = [];
|
||||
const query = state.query$.getValue();
|
||||
|
||||
if (focusedNode) {
|
||||
const location = focusedNode.location;
|
||||
|
||||
if (location) {
|
||||
focusBackdrop.push([
|
||||
location.min,
|
||||
location.max + 1,
|
||||
(text) => (
|
||||
<span
|
||||
style={{
|
||||
display: 'inline-block',
|
||||
margin: -4,
|
||||
padding: 4,
|
||||
borderRadius: 4,
|
||||
background: 'rgb(190, 237, 224)',
|
||||
}}
|
||||
>
|
||||
{text}
|
||||
</span>
|
||||
),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
if (query) {
|
||||
Walker.walk(query.ast, {
|
||||
visitSource: (node) => {
|
||||
const location = node.location;
|
||||
if (!location) return;
|
||||
targetsBackdrop.push([
|
||||
location.min,
|
||||
location.max + 1,
|
||||
(text) => (
|
||||
<span
|
||||
style={{
|
||||
display: 'inline-block',
|
||||
margin: -4,
|
||||
padding: 4,
|
||||
borderRadius: 4,
|
||||
background: 'rgb(255, 243, 191)',
|
||||
}}
|
||||
onMouseEnter={() => {
|
||||
state.focusedNode$.next(node);
|
||||
}}
|
||||
>
|
||||
{text}
|
||||
</span>
|
||||
),
|
||||
]);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (limit) {
|
||||
const location = limit.location;
|
||||
|
||||
if (!location) return null;
|
||||
|
||||
targetsBackdrop.push([
|
||||
location.min,
|
||||
location.max + 1,
|
||||
(text) => (
|
||||
<span
|
||||
style={{
|
||||
display: 'inline-block',
|
||||
margin: -4,
|
||||
padding: 4,
|
||||
borderRadius: 4,
|
||||
background: 'rgb(255, 243, 191)',
|
||||
}}
|
||||
onMouseEnter={() => {
|
||||
state.focusedNode$.next(limit);
|
||||
}}
|
||||
>
|
||||
{text}
|
||||
</span>
|
||||
),
|
||||
]);
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiPanel paddingSize="l">
|
||||
<div>
|
||||
<EuiButton
|
||||
size={'s'}
|
||||
color="text"
|
||||
onClick={() => {
|
||||
const value = state.query$.getValue();
|
||||
|
||||
if (!value) {
|
||||
return;
|
||||
}
|
||||
|
||||
state.src$.next(value.print());
|
||||
}}
|
||||
>
|
||||
Re-format
|
||||
</EuiButton>
|
||||
</div>
|
||||
<EuiSpacer size={'m'} />
|
||||
<EsqlEditor
|
||||
src={src}
|
||||
onChange={(newSrc) => state.src$.next(newSrc)}
|
||||
backdrops={[targetsBackdrop, focusBackdrop]}
|
||||
highlight={highlight}
|
||||
/>
|
||||
</EuiPanel>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,36 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { CodeEditor } from '@kbn/code-editor';
|
||||
import { EuiPanel, EuiSpacer } from '@elastic/eui';
|
||||
import { useEsqlInspector } from '../../../../context';
|
||||
import { useBehaviorSubject } from '../../../../../../hooks/use_behavior_subject';
|
||||
|
||||
export const PreviewAst: React.FC = (props) => {
|
||||
const state = useEsqlInspector();
|
||||
const query = useBehaviorSubject(state.queryLastValid$);
|
||||
|
||||
if (!query) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiSpacer size="l" />
|
||||
<EuiPanel paddingSize="xs" hasShadow={false} hasBorder style={{ height: 600 }}>
|
||||
<CodeEditor
|
||||
allowFullScreen={true}
|
||||
languageId={'json'}
|
||||
value={JSON.stringify(query.ast, null, 2)}
|
||||
/>
|
||||
</EuiPanel>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,69 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import {
|
||||
EuiFieldText,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiSpacer,
|
||||
EuiSwitch,
|
||||
} from '@elastic/eui';
|
||||
import { PrettyPrintBasic } from '../../../../../../../pretty_print_basic';
|
||||
|
||||
export interface BasicPrinterProps {
|
||||
src: string;
|
||||
}
|
||||
|
||||
export const BasicPrinter: React.FC<BasicPrinterProps> = ({ src }) => {
|
||||
const [lowercase, setLowercase] = React.useState(false);
|
||||
const [multiline, setMultiline] = React.useState(false);
|
||||
const [pipeTab, setPipeTab] = React.useState(' ');
|
||||
|
||||
return (
|
||||
<EuiFlexGroup style={{ maxWidth: 1200 }} alignItems={'flexStart'}>
|
||||
<EuiFlexItem>
|
||||
<PrettyPrintBasic src={src} opts={{ multiline, pipeTab, lowercase }} />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false} style={{ width: 300 }}>
|
||||
<EuiFlexGroup>
|
||||
<EuiFlexItem>
|
||||
<EuiFormRow label="Lowercase">
|
||||
<EuiSwitch
|
||||
label="Lowercase"
|
||||
checked={lowercase}
|
||||
onChange={() => setLowercase((x) => !x)}
|
||||
compressed
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiFormRow label="Multiline">
|
||||
<EuiSwitch
|
||||
label="Multiline"
|
||||
checked={multiline}
|
||||
onChange={() => setMultiline((x) => !x)}
|
||||
compressed
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
|
||||
<EuiSpacer size="m" />
|
||||
|
||||
{!!multiline && (
|
||||
<EuiFormRow label="Pipe tab" helpText="Tabbing before command pipe">
|
||||
<EuiFieldText compressed value={pipeTab} onChange={(e) => setPipeTab(e.target.value)} />
|
||||
</EuiFormRow>
|
||||
)}
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,90 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import {
|
||||
EuiFieldText,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiRange,
|
||||
EuiSpacer,
|
||||
EuiSwitch,
|
||||
} from '@elastic/eui';
|
||||
import { PrettyPrint } from '../../../../../../../pretty_print';
|
||||
|
||||
export interface WrappingPrinterProps {
|
||||
src: string;
|
||||
}
|
||||
|
||||
export const WrappingPrinter: React.FC<WrappingPrinterProps> = ({ src }) => {
|
||||
const [lowercase, setLowercase] = React.useState(false);
|
||||
const [multiline, setMultiline] = React.useState(false);
|
||||
const [wrap, setWrap] = React.useState(80);
|
||||
const [tab, setTab] = React.useState(' ');
|
||||
const [pipeTab, setPipeTab] = React.useState(' ');
|
||||
const [indent, setIndent] = React.useState('');
|
||||
|
||||
return (
|
||||
<EuiFlexGroup style={{ maxWidth: 1200 }} alignItems={'flexStart'}>
|
||||
<EuiFlexItem>
|
||||
<PrettyPrint src={src} opts={{ lowercase, multiline, wrap, tab, pipeTab, indent }} />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false} style={{ width: 300 }}>
|
||||
<EuiFlexGroup>
|
||||
<EuiFlexItem>
|
||||
<EuiFormRow label="Lowercase">
|
||||
<EuiSwitch
|
||||
label="Lowercase"
|
||||
checked={lowercase}
|
||||
onChange={() => setLowercase((x) => !x)}
|
||||
compressed
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiFormRow label="Multiline">
|
||||
<EuiSwitch
|
||||
label="Multiline"
|
||||
checked={multiline}
|
||||
onChange={() => setMultiline((x) => !x)}
|
||||
compressed
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
|
||||
<EuiSpacer size="m" />
|
||||
|
||||
<EuiFormRow label="Line width">
|
||||
<EuiRange
|
||||
min={20}
|
||||
max={150}
|
||||
value={wrap}
|
||||
onChange={(e) => setWrap(Number(e.currentTarget.value))}
|
||||
showInput
|
||||
aria-label="Wrapping line width"
|
||||
/>
|
||||
</EuiFormRow>
|
||||
|
||||
<EuiFormRow label="Initial indentation" helpText="Indentation applied to all lines">
|
||||
<EuiFieldText compressed value={indent} onChange={(e) => setIndent(e.target.value)} />
|
||||
</EuiFormRow>
|
||||
|
||||
<EuiFormRow label="Tab" helpText="Tabbing for each new indentation level">
|
||||
<EuiFieldText compressed value={tab} onChange={(e) => setTab(e.target.value)} />
|
||||
</EuiFormRow>
|
||||
|
||||
<EuiFormRow label="Pipe tab" helpText="Tabbing before command pipe">
|
||||
<EuiFieldText compressed value={pipeTab} onChange={(e) => setPipeTab(e.target.value)} />
|
||||
</EuiFormRow>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,45 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EuiCode, EuiPanel, EuiSpacer, EuiText } from '@elastic/eui';
|
||||
import { useEsqlInspector } from '../../../../context';
|
||||
import { useBehaviorSubject } from '../../../../../../hooks/use_behavior_subject';
|
||||
import { WrappingPrinter } from './components/wrapping_printer';
|
||||
import { BasicPrinter } from './components/basic_printer';
|
||||
|
||||
export const PreviewPrint: React.FC = (props) => {
|
||||
const state = useEsqlInspector();
|
||||
const src = useBehaviorSubject(state.src$);
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiSpacer size="l" />
|
||||
<EuiPanel hasShadow={false} hasBorder>
|
||||
<EuiText>
|
||||
<p>
|
||||
Formatted with <EuiCode>WrappingPrettyPrinter</EuiCode>:
|
||||
</p>
|
||||
</EuiText>
|
||||
<EuiSpacer />
|
||||
<WrappingPrinter src={src} />
|
||||
|
||||
<EuiSpacer size="l" />
|
||||
|
||||
<EuiText>
|
||||
<p>
|
||||
Formatted with <EuiCode>BasicPrettyPrinter</EuiCode>:
|
||||
</p>
|
||||
</EuiText>
|
||||
<EuiSpacer />
|
||||
<BasicPrinter src={src} />
|
||||
</EuiPanel>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,86 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EuiDataGrid, EuiPanel, EuiSpacer } from '@elastic/eui';
|
||||
import { createParser } from '@kbn/esql-ast';
|
||||
import { useEsqlInspector } from '../../../../context';
|
||||
import { useBehaviorSubject } from '../../../../../../hooks/use_behavior_subject';
|
||||
|
||||
const columns = [
|
||||
{
|
||||
id: 'token',
|
||||
display: 'Token',
|
||||
},
|
||||
{
|
||||
id: 'symbol',
|
||||
display: 'Symbol',
|
||||
},
|
||||
{
|
||||
id: 'type',
|
||||
display: 'Type',
|
||||
},
|
||||
{
|
||||
id: 'channel',
|
||||
display: 'Channel',
|
||||
},
|
||||
];
|
||||
|
||||
const symbolicNames = createParser('').lexer.symbolicNames;
|
||||
|
||||
export const PreviewTokens: React.FC = (props) => {
|
||||
const state = useEsqlInspector();
|
||||
const query = useBehaviorSubject(state.queryLastValid$);
|
||||
|
||||
const [visibleColumns, setVisibleColumns] = React.useState(columns.map(({ id }) => id));
|
||||
|
||||
if (!query) {
|
||||
return null;
|
||||
}
|
||||
|
||||
interface Row {
|
||||
token: string;
|
||||
symbol: string;
|
||||
type: number;
|
||||
channel: number;
|
||||
}
|
||||
|
||||
const data: Row[] = [];
|
||||
|
||||
for (const token of query.tokens) {
|
||||
data.push({
|
||||
token: token.text,
|
||||
symbol: symbolicNames[token.type] ?? '',
|
||||
type: token.type,
|
||||
channel: token.channel,
|
||||
});
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiSpacer size="l" />
|
||||
<EuiPanel paddingSize="xs" hasShadow={false} hasBorder style={{ height: 600 }}>
|
||||
<EuiDataGrid
|
||||
aria-label="Container constrained data grid demo"
|
||||
columns={columns}
|
||||
columnVisibility={{
|
||||
visibleColumns,
|
||||
setVisibleColumns,
|
||||
}}
|
||||
rowCount={data.length}
|
||||
gridStyle={{
|
||||
border: 'horizontal',
|
||||
header: 'underline',
|
||||
}}
|
||||
renderCellValue={({ rowIndex, columnId }) => (data as any)[rowIndex][columnId]}
|
||||
/>
|
||||
</EuiPanel>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EuiButton, EuiFormRow, EuiPanel, EuiSpacer, EuiTitle } from '@elastic/eui';
|
||||
import { Builder, ESQLSource } from '@kbn/esql-ast';
|
||||
import { useEsqlInspector } from '../../../../../../context';
|
||||
import { useBehaviorSubject } from '../../../../../../../../hooks/use_behavior_subject';
|
||||
import { Source } from './source';
|
||||
|
||||
export const FromCommand: React.FC = () => {
|
||||
const state = useEsqlInspector();
|
||||
const from = useBehaviorSubject(state.from$);
|
||||
|
||||
if (!from) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const sources: React.ReactNode[] = [];
|
||||
let i = 0;
|
||||
|
||||
for (const arg of from.args) {
|
||||
if ((arg as any).type !== 'source') continue;
|
||||
sources.push(<Source key={i} index={i + 1} node={arg as ESQLSource} />);
|
||||
i++;
|
||||
}
|
||||
|
||||
return (
|
||||
<EuiPanel hasShadow={false} hasBorder style={{ maxWidth: 360 }}>
|
||||
<EuiTitle size="xxs">
|
||||
<h3>Sources</h3>
|
||||
</EuiTitle>
|
||||
<div
|
||||
css={{
|
||||
paddingTop: 4,
|
||||
}}
|
||||
>
|
||||
{sources}
|
||||
</div>
|
||||
<EuiSpacer size={'m'} />
|
||||
<EuiFormRow fullWidth>
|
||||
<EuiButton
|
||||
fullWidth
|
||||
size={'s'}
|
||||
color="text"
|
||||
onClick={() => {
|
||||
const length = from.args.length;
|
||||
const source = Builder.expression.source({
|
||||
name: `source${length + 1}`,
|
||||
sourceType: 'index',
|
||||
});
|
||||
from.args.push(source);
|
||||
state.reprint();
|
||||
}}
|
||||
>
|
||||
Add source
|
||||
</EuiButton>
|
||||
</EuiFormRow>
|
||||
</EuiPanel>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,110 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import {
|
||||
EuiButtonIcon,
|
||||
EuiFieldText,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiIcon,
|
||||
EuiSpacer,
|
||||
EuiToolTip,
|
||||
} from '@elastic/eui';
|
||||
import { ESQLSource } from '@kbn/esql-ast';
|
||||
import { ESQLAstBaseItem } from '@kbn/esql-ast/src/types';
|
||||
import { useEsqlInspector } from '../../../../../../context';
|
||||
import { useBehaviorSubject } from '../../../../../../../../hooks/use_behavior_subject';
|
||||
|
||||
const getFirstComment = (node: ESQLAstBaseItem): string | undefined => {
|
||||
const list = node.formatting?.top ?? node.formatting?.left ?? node.formatting?.right;
|
||||
if (list) {
|
||||
for (const decoration of list) {
|
||||
if (decoration.type === 'comment') {
|
||||
return decoration.text;
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
export interface SourceProps {
|
||||
node: ESQLSource;
|
||||
index: number;
|
||||
}
|
||||
|
||||
export const Source: React.FC<SourceProps> = ({ node, index }) => {
|
||||
const state = useEsqlInspector();
|
||||
const query = useBehaviorSubject(state.queryLastValid$);
|
||||
const focusedNode = useBehaviorSubject(state.focusedNode$);
|
||||
|
||||
if (!query) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const comment = getFirstComment(node);
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiSpacer size={'m'} />
|
||||
<div
|
||||
onMouseEnter={() => {
|
||||
state.focusedNode$.next(node);
|
||||
}}
|
||||
style={{
|
||||
background: focusedNode === node ? 'rgb(190, 237, 224)' : 'transparent',
|
||||
padding: 8,
|
||||
margin: -8,
|
||||
borderRadius: 8,
|
||||
position: 'relative',
|
||||
}}
|
||||
>
|
||||
<EuiFormRow
|
||||
fullWidth
|
||||
helpText={getFirstComment(node)}
|
||||
label={
|
||||
comment ? (
|
||||
<EuiToolTip content={comment}>
|
||||
<span>
|
||||
Source {index} <EuiIcon type="editorComment" color="subdued" />
|
||||
</span>
|
||||
</EuiToolTip>
|
||||
) : (
|
||||
<>Source {index}</>
|
||||
)
|
||||
}
|
||||
>
|
||||
<EuiFieldText
|
||||
fullWidth
|
||||
value={node.name}
|
||||
onChange={(e) => {
|
||||
node.name = e.target.value;
|
||||
state.reprint();
|
||||
}}
|
||||
/>
|
||||
</EuiFormRow>
|
||||
<div style={{ position: 'absolute', right: 0, top: 0 }}>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiButtonIcon
|
||||
iconType="cross"
|
||||
aria-label="Remove"
|
||||
onClick={() => {
|
||||
if (!query) return;
|
||||
const from = state.from$.getValue();
|
||||
if (!from) return;
|
||||
from.args = from.args.filter((c) => c !== node);
|
||||
state.reprint();
|
||||
}}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,126 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import {
|
||||
EuiButton,
|
||||
EuiButtonIcon,
|
||||
EuiFieldText,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiPanel,
|
||||
EuiTitle,
|
||||
} from '@elastic/eui';
|
||||
import { Builder } from '@kbn/esql-ast';
|
||||
import { useEsqlInspector } from '../../../../../../context';
|
||||
import { useBehaviorSubject } from '../../../../../../../../hooks/use_behavior_subject';
|
||||
|
||||
export const LimitCommand: React.FC = () => {
|
||||
const state = useEsqlInspector();
|
||||
const limit = useBehaviorSubject(state.limit$);
|
||||
const focusedNode = useBehaviorSubject(state.focusedNode$);
|
||||
|
||||
if (!limit) {
|
||||
return (
|
||||
<EuiPanel hasShadow={false} hasBorder style={{ maxWidth: 360 }}>
|
||||
<EuiFormRow fullWidth>
|
||||
<EuiButton
|
||||
fullWidth
|
||||
size={'s'}
|
||||
color="text"
|
||||
onClick={() => {
|
||||
const query = state.query$.getValue();
|
||||
if (!query) return;
|
||||
const literal = Builder.expression.literal.numeric({
|
||||
value: 10,
|
||||
literalType: 'integer',
|
||||
});
|
||||
const command = Builder.command({
|
||||
name: 'limit',
|
||||
args: [literal],
|
||||
});
|
||||
query.ast.commands.push(command);
|
||||
state.reprint();
|
||||
}}
|
||||
>
|
||||
Add limit
|
||||
</EuiButton>
|
||||
</EuiFormRow>
|
||||
</EuiPanel>
|
||||
);
|
||||
}
|
||||
|
||||
const value = +(limit.args[0] as any)?.value;
|
||||
|
||||
if (typeof value !== 'number') {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<EuiPanel hasShadow={false} hasBorder style={{ maxWidth: 360 }}>
|
||||
<div
|
||||
onMouseEnter={() => {
|
||||
state.focusedNode$.next(limit);
|
||||
}}
|
||||
style={{
|
||||
background: focusedNode === limit ? 'rgb(190, 237, 224)' : 'transparent',
|
||||
padding: 8,
|
||||
margin: -8,
|
||||
borderRadius: 8,
|
||||
position: 'relative',
|
||||
}}
|
||||
>
|
||||
<EuiTitle size="xxs">
|
||||
<h3>Limit</h3>
|
||||
</EuiTitle>
|
||||
<div
|
||||
css={{
|
||||
paddingTop: 16,
|
||||
}}
|
||||
>
|
||||
<EuiFormRow fullWidth>
|
||||
<EuiFieldText
|
||||
fullWidth
|
||||
value={value}
|
||||
onChange={(e) => {
|
||||
const newValue = +e.target.value;
|
||||
|
||||
if (newValue !== newValue) {
|
||||
return;
|
||||
}
|
||||
|
||||
const literal = Builder.expression.literal.numeric({
|
||||
value: newValue,
|
||||
literalType: 'integer',
|
||||
});
|
||||
|
||||
limit.args[0] = literal;
|
||||
state.reprint();
|
||||
}}
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</div>
|
||||
<div style={{ position: 'absolute', right: 0, top: 0 }}>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiButtonIcon
|
||||
iconType="cross"
|
||||
aria-label="Remove"
|
||||
onClick={() => {
|
||||
const query = state.query$.getValue();
|
||||
if (!query) return;
|
||||
query.ast.commands = query.ast.commands.filter((c) => c !== limit);
|
||||
state.reprint();
|
||||
}}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</div>
|
||||
</div>
|
||||
</EuiPanel>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,33 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EuiSpacer } from '@elastic/eui';
|
||||
import { useEsqlInspector } from '../../../../context';
|
||||
import { useBehaviorSubject } from '../../../../../../hooks/use_behavior_subject';
|
||||
import { FromCommand } from './components/from_command';
|
||||
import { LimitCommand } from './components/limit_command';
|
||||
|
||||
export const PreviewUi: React.FC = (props) => {
|
||||
const state = useEsqlInspector();
|
||||
const query = useBehaviorSubject(state.queryLastValid$);
|
||||
|
||||
if (!query) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiSpacer />
|
||||
<FromCommand />
|
||||
<EuiSpacer />
|
||||
<LimitCommand />
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EuiTabbedContent, EuiTabbedContentProps } from '@elastic/eui';
|
||||
import { PreviewAst } from './components/preview_ast';
|
||||
import { PreviewTokens } from './components/preview_tokens';
|
||||
import { PreviewUi } from './components/preview_ui';
|
||||
import { PreviewPrint } from './components/preview_print';
|
||||
|
||||
export const Preview: React.FC = () => {
|
||||
const tabs: EuiTabbedContentProps['tabs'] = [
|
||||
{
|
||||
id: 'ui',
|
||||
name: 'UI',
|
||||
content: <PreviewUi />,
|
||||
},
|
||||
{
|
||||
id: 'formatter',
|
||||
name: 'Formatter',
|
||||
content: <PreviewPrint />,
|
||||
},
|
||||
{
|
||||
id: 'ast',
|
||||
name: 'AST',
|
||||
content: <PreviewAst />,
|
||||
},
|
||||
{
|
||||
id: 'tokens',
|
||||
name: 'Tokens',
|
||||
content: <PreviewTokens />,
|
||||
},
|
||||
];
|
||||
|
||||
return <EuiTabbedContent tabs={tabs} />;
|
||||
};
|
|
@ -7,15 +7,9 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import type { RecognitionException } from 'antlr4';
|
||||
import { getPosition } from './ast_position_utils';
|
||||
import * as React from 'react';
|
||||
import { EsqlInspectorState } from './esql_inspector_state';
|
||||
|
||||
export function createError(exception: RecognitionException) {
|
||||
const token = exception.offendingToken;
|
||||
export const context = React.createContext<EsqlInspectorState | null>(null);
|
||||
|
||||
return {
|
||||
type: 'error' as const,
|
||||
text: `SyntaxError: ${exception.message}`,
|
||||
location: getPosition(token),
|
||||
};
|
||||
}
|
||||
export const useEsqlInspector = () => React.useContext(context)!;
|
|
@ -0,0 +1,18 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EsqlInspector } from './esql_inspector';
|
||||
|
||||
export default {
|
||||
title: '<EsqlInspector>',
|
||||
parameters: {},
|
||||
};
|
||||
|
||||
export const Default = () => <EsqlInspector />;
|
|
@ -0,0 +1,29 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EsqlInspectorState } from './esql_inspector_state';
|
||||
import { context } from './context';
|
||||
import { EsqlInspectorConnected } from './esql_inspector_connected';
|
||||
|
||||
export interface EsqlInspectorProps {
|
||||
state?: EsqlInspectorState;
|
||||
}
|
||||
|
||||
export const EsqlInspector: React.FC<EsqlInspectorProps> = (props) => {
|
||||
const state = React.useMemo(() => {
|
||||
return props.state ?? new EsqlInspectorState();
|
||||
}, [props.state]);
|
||||
|
||||
return (
|
||||
<context.Provider value={state}>
|
||||
<EsqlInspectorConnected />
|
||||
</context.Provider>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,38 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EuiFlexGroup, EuiFlexItem, EuiSpacer } from '@elastic/eui';
|
||||
import { EsqlInspectorState } from './esql_inspector_state';
|
||||
import { Editor } from './components/editor';
|
||||
import { Preview } from './components/preview';
|
||||
|
||||
export interface EsqlInspectorConnectedProps {
|
||||
state?: EsqlInspectorState;
|
||||
}
|
||||
|
||||
export const EsqlInspectorConnected: React.FC<EsqlInspectorConnectedProps> = (props) => {
|
||||
return (
|
||||
<>
|
||||
<EuiFlexGroup style={{ maxWidth: 1200 }}>
|
||||
<EuiFlexItem>
|
||||
<Editor />
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
|
||||
<EuiSpacer size="l" />
|
||||
|
||||
<EuiFlexGroup style={{ maxWidth: 1200 }}>
|
||||
<EuiFlexItem>
|
||||
<Preview />
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,85 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { BehaviorSubject } from 'rxjs';
import { ESQLCommand, EsqlQuery, Walker } from '@kbn/esql-ast';
import { ESQLProperNode } from '@kbn/esql-ast/src/types';
import { Annotation } from '../annotations';
import { highlight } from './helpers';

const defaultSrc = `FROM kibana_sample_data_logs, another_index
  | KEEP bytes, clientip, url.keyword, response.keyword
  | STATS Visits = COUNT(), Unique = COUNT_DISTINCT(clientip),
      p95 = PERCENTILE(bytes, 95), median = MEDIAN(bytes)
      BY type, url.keyword
  | EVAL total_records = TO_DOUBLE(count_4xx + count_5xx + count_rest)
  | DROP count_4xx, count_rest, total_records
  | LIMIT 123`;

export class EsqlInspectorState {
  public readonly src$ = new BehaviorSubject<string>(defaultSrc);
  public readonly query$ = new BehaviorSubject<EsqlQuery | null>(null);
  public readonly queryLastValid$ = new BehaviorSubject<EsqlQuery | null>(EsqlQuery.fromSrc(''));
  public readonly highlight$ = new BehaviorSubject<Annotation[]>([]);
  public readonly from$ = new BehaviorSubject<ESQLCommand | null>(null);
  public readonly limit$ = new BehaviorSubject<ESQLCommand | null>(null);
  public readonly focusedNode$ = new BehaviorSubject<ESQLProperNode | null>(null);

  constructor() {
    this.src$.subscribe((src) => {
      this.focusedNode$.next(null);
      try {
        this.query$.next(EsqlQuery.fromSrc(src, { withFormatting: true }));
      } catch (e) {
        this.query$.next(null);
      }
    });

    this.query$.subscribe((query) => {
      if (query instanceof EsqlQuery) {
        this.queryLastValid$.next(query);

        this.highlight$.next(highlight(query));

        const from = Walker.match(query?.ast, {
          type: 'command',
          name: 'from',
        });

        if (from) {
          this.from$.next(from as ESQLCommand);
        } else {
          this.from$.next(null);
        }

        const limit = Walker.match(query?.ast, {
          type: 'command',
          name: 'limit',
        });

        if (limit) {
          this.limit$.next(limit as ESQLCommand);
        } else {
          this.limit$.next(null);
        }
      }
    });
  }

  public readonly reprint = () => {
    const query = this.query$.getValue();

    if (!query) {
      return;
    }

    const src = query.print();
    this.src$.next(src);
  };
}
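For orientation, a minimal sketch of how this state container could be driven outside of React: push a new source string, react to the re-parsed query, and round-trip it through `reprint()`. Only the API visible in the class above is used; the query text and import path are illustrative.

```ts
import { Walker } from '@kbn/esql-ast';
import { EsqlInspectorState } from './esql_inspector_state';

const state = new EsqlInspectorState();

// Each time `src$` changes, the constructor re-parses the source and
// publishes the result (or `null` on a parse failure) to `query$`.
state.query$.subscribe((query) => {
  if (!query) return;
  // Find the LIMIT command the same way the constructor does.
  const limit = Walker.match(query.ast, { type: 'command', name: 'limit' });
  console.log('LIMIT present:', Boolean(limit));
});

state.src$.next('FROM logs-* | LIMIT 10');

// Pretty-print the current query back into the `src$` subject.
state.reprint();
```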
@ -0,0 +1,151 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { EsqlQuery, Walker } from '@kbn/esql-ast';
|
||||
import { euiPaletteColorBlind } from '@elastic/eui';
|
||||
import { Annotation } from '../annotations';
|
||||
|
||||
const palette = euiPaletteColorBlind();
|
||||
|
||||
const colors = {
|
||||
command: palette[2],
|
||||
literal: palette[0],
|
||||
source: palette[3],
|
||||
operator: palette[9],
|
||||
column: palette[6],
|
||||
function: palette[8],
|
||||
};
|
||||
|
||||
export const highlight = (query: EsqlQuery): Annotation[] => {
|
||||
const annotations: Annotation[] = [];
|
||||
|
||||
Walker.walk(query.ast, {
|
||||
visitCommand: (node) => {
|
||||
const location = node.location;
|
||||
if (!location) return;
|
||||
const color = node.name === 'from' ? '#07f' : colors.command;
|
||||
annotations.push([
|
||||
location.min,
|
||||
location.min + node.name.length,
|
||||
(text) => <span style={{ color, fontWeight: 'bold' }}>{text}</span>,
|
||||
]);
|
||||
},
|
||||
|
||||
visitSource: (node) => {
|
||||
const location = node.location;
|
||||
if (!location) return;
|
||||
annotations.push([
|
||||
location.min,
|
||||
location.max + 1,
|
||||
(text) => <span style={{ color: colors.source }}>{text}</span>,
|
||||
]);
|
||||
},
|
||||
|
||||
visitColumn: (node) => {
|
||||
const location = node.location;
|
||||
if (!location) return;
|
||||
annotations.push([
|
||||
location.min,
|
||||
location.max + 1,
|
||||
(text) => <span style={{ color: colors.column }}>{text}</span>,
|
||||
]);
|
||||
},
|
||||
|
||||
visitFunction: (node) => {
|
||||
const location = node.location;
|
||||
if (!location) return;
|
||||
if (node.subtype === 'variadic-call') {
|
||||
annotations.push([
|
||||
location.min,
|
||||
location.min + node.name.length,
|
||||
(text) => <span style={{ color: colors.function }}>{text}</span>,
|
||||
]);
|
||||
}
|
||||
},
|
||||
|
||||
visitLiteral: (node) => {
|
||||
const location = node.location;
|
||||
if (!location) return;
|
||||
annotations.push([
|
||||
location.min,
|
||||
location.max + 1,
|
||||
(text) => <span style={{ color: colors.literal }}>{text}</span>,
|
||||
]);
|
||||
},
|
||||
});
|
||||
|
||||
Walker.visitComments(query.ast, (comment) => {
|
||||
annotations.push([
|
||||
comment.location.min,
|
||||
comment.location.max,
|
||||
(text) => <span style={{ opacity: 0.3 }}>{text}</span>,
|
||||
]);
|
||||
});
|
||||
|
||||
for (const token of query.tokens) {
|
||||
switch (token.type) {
|
||||
// PIPE
|
||||
case 30: {
|
||||
const pos = token.start;
|
||||
|
||||
annotations.push([
|
||||
pos,
|
||||
pos + 1,
|
||||
(text) => <span style={{ fontWeight: 'bold', opacity: 0.3 }}>{text}</span>,
|
||||
]);
|
||||
|
||||
break;
|
||||
}
|
||||
case 34: // BY
|
||||
case 78: {
|
||||
// METADATA
|
||||
const pos = token.start;
|
||||
|
||||
annotations.push([
|
||||
pos,
|
||||
pos + token.text.length,
|
||||
(text) => <span style={{ color: colors.command, fontWeight: 'bold' }}>{text}</span>,
|
||||
]);
|
||||
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
switch (token.text) {
|
||||
case '+':
|
||||
case '-':
|
||||
case '*':
|
||||
case '/':
|
||||
case '%':
|
||||
case '!=':
|
||||
case '>':
|
||||
case '>=':
|
||||
case '<':
|
||||
case '<=':
|
||||
case 'and':
|
||||
case 'AND':
|
||||
case 'or':
|
||||
case 'OR':
|
||||
case 'not':
|
||||
case 'NOT': {
|
||||
annotations.push([
|
||||
token.start,
|
||||
token.start + token.text.length,
|
||||
(text) => <span style={{ color: colors.operator }}>{text}</span>,
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
annotations.sort((a, b) => a[0] - b[0]);
|
||||
|
||||
return annotations;
|
||||
};
|
|
@ -0,0 +1,12 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

export { context, useEsqlInspector } from './context';
export { EsqlInspectorState } from './esql_inspector_state';
export { EsqlInspector, type EsqlInspectorProps } from './esql_inspector';
@ -0,0 +1,39 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import * as React from 'react';
import { FlexibleInput, FlexibleInputProps } from './flexible_input';

export default {
  title: '<FlexibleInput>',
  parameters: {},
};

const Demo: React.FC<FlexibleInputProps> = (props) => {
  const [value, setValue] = React.useState(props.value);

  return (
    <code>
      <FlexibleInput
        {...props}
        value={value}
        onChange={(e) => {
          setValue(e.target.value);
        }}
      />
    </code>
  );
};

const src = `FROM index, index2
| WHERE language == "esql"
| LIMIT 10
`;

export const Example = () => <Demo value={src} multiline />;
@ -0,0 +1,218 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import * as React from 'react';
|
||||
import { css } from '@emotion/react';
|
||||
import { copyStyles } from './helpers';
|
||||
|
||||
const blockCss = css({
|
||||
display: 'inline-block',
|
||||
position: 'relative',
|
||||
width: '100%',
|
||||
});
|
||||
|
||||
const inputCss = css({
|
||||
display: 'inline-block',
|
||||
verticalAlign: 'bottom',
|
||||
boxSizing: 'border-box',
|
||||
overflow: 'hidden',
|
||||
padding: 0,
|
||||
margin: 0,
|
||||
background: 0,
|
||||
outline: '0 !important',
|
||||
border: 0,
|
||||
color: 'inherit',
|
||||
fontWeight: 'inherit',
|
||||
fontFamily: 'inherit',
|
||||
fontSize: 'inherit',
|
||||
lineHeight: 'inherit',
|
||||
whiteSpace: 'pre',
|
||||
resize: 'none',
|
||||
});
|
||||
|
||||
const sizerCss = css({
|
||||
display: 'inline-block',
|
||||
position: 'absolute',
|
||||
pointerEvents: 'none',
|
||||
userSelect: 'none',
|
||||
boxSizing: 'border-box',
|
||||
top: 0,
|
||||
left: 0,
|
||||
border: 0,
|
||||
whiteSpace: 'pre',
|
||||
});
|
||||
|
||||
export interface FlexibleInputProps {
|
||||
/** The string to display and edit. */
|
||||
value: string;
|
||||
|
||||
/** Ref to the input element. */
|
||||
inp?: (el: HTMLInputElement | HTMLTextAreaElement | null) => void;
|
||||
|
||||
/** Whether the input is multiline. */
|
||||
multiline?: boolean;
|
||||
|
||||
/** Whether to wrap text to a new line when it exceeds the length of current. */
|
||||
wrap?: boolean;
|
||||
|
||||
/**
|
||||
* Whether the input should take the full width of the parent, even when there
|
||||
* is not enough text to do that naturally with content.
|
||||
*/
|
||||
fullWidth?: boolean;
|
||||
|
||||
/** Typeahead string to add to the value. It is visible at half opacity. */
|
||||
typeahead?: string;
|
||||
|
||||
/** Addition width to add, for example, to account for number stepper. */
|
||||
extraWidth?: number;
|
||||
|
||||
/** Minimum width to allow. */
|
||||
minWidth?: number;
|
||||
|
||||
/** Maximum width to allow. */
|
||||
maxWidth?: number;
|
||||
|
||||
/** Whether the input is focused on initial render. */
|
||||
focus?: boolean;
|
||||
|
||||
/** Callback for when the input value changes. */
|
||||
onChange?: React.ChangeEventHandler<HTMLInputElement | HTMLTextAreaElement>;
|
||||
|
||||
/** Callback for when the input is focused. */
|
||||
onFocus?: React.FocusEventHandler<HTMLInputElement | HTMLTextAreaElement>;
|
||||
|
||||
/** Callback for when the input is blurred. */
|
||||
onBlur?: React.FocusEventHandler<HTMLInputElement | HTMLTextAreaElement>;
|
||||
|
||||
/** Callback for when a key is pressed. */
|
||||
onKeyDown?: React.KeyboardEventHandler<HTMLInputElement | HTMLTextAreaElement>;
|
||||
|
||||
/** Callback for when the Enter key is pressed. */
|
||||
onSubmit?: React.KeyboardEventHandler<HTMLInputElement | HTMLTextAreaElement>;
|
||||
|
||||
/** Callback for when the Escape key is pressed. */
|
||||
onCancel?: React.KeyboardEventHandler<HTMLInputElement | HTMLTextAreaElement>;
|
||||
|
||||
/** Callback for when the Tab key is pressed. */
|
||||
onTab?: React.KeyboardEventHandler<HTMLInputElement | HTMLTextAreaElement>;
|
||||
}
|
||||
|
||||
export const FlexibleInput: React.FC<FlexibleInputProps> = ({
|
||||
value,
|
||||
inp,
|
||||
multiline,
|
||||
wrap,
|
||||
fullWidth,
|
||||
typeahead = '',
|
||||
extraWidth,
|
||||
minWidth = 8,
|
||||
maxWidth,
|
||||
focus,
|
||||
onChange,
|
||||
onFocus,
|
||||
onBlur,
|
||||
onKeyDown,
|
||||
onSubmit,
|
||||
onCancel,
|
||||
onTab,
|
||||
}) => {
|
||||
const inputRef = React.useRef<HTMLInputElement | HTMLTextAreaElement>(null);
|
||||
const sizerRef = React.useRef<HTMLSpanElement>(null);
|
||||
const sizerValueRef = React.useRef<HTMLSpanElement>(null);
|
||||
|
||||
React.useLayoutEffect(() => {
|
||||
if (!inputRef.current || !sizerRef.current) return;
|
||||
if (focus) inputRef.current.focus();
|
||||
copyStyles(inputRef.current, sizerRef.current!, [
|
||||
'font',
|
||||
'fontSize',
|
||||
'fontFamily',
|
||||
'fontWeight',
|
||||
'fontStyle',
|
||||
'letterSpacing',
|
||||
'textTransform',
|
||||
'boxSizing',
|
||||
]);
|
||||
}, [focus]);
|
||||
|
||||
React.useLayoutEffect(() => {
|
||||
const sizerValue = sizerValueRef.current;
|
||||
if (sizerValue) sizerValue.textContent = value;
|
||||
const input = inputRef.current;
|
||||
const sizer = sizerRef.current;
|
||||
if (!input || !sizer) return;
|
||||
let width = sizer.scrollWidth;
|
||||
if (extraWidth) width += extraWidth;
|
||||
if (minWidth) width = Math.max(width, minWidth);
|
||||
if (maxWidth) width = Math.min(width, maxWidth);
|
||||
const style = input.style;
|
||||
style.width = width + 'px';
|
||||
if (multiline) {
|
||||
const height = sizer.scrollHeight;
|
||||
style.height = height + 'px';
|
||||
}
|
||||
}, [value, extraWidth, minWidth, maxWidth, multiline]);
|
||||
|
||||
const attr: React.InputHTMLAttributes<HTMLInputElement | HTMLTextAreaElement> & { ref: any } = {
|
||||
ref: (input: unknown) => {
|
||||
(inputRef as any).current = input;
|
||||
if (inp) inp(input as HTMLInputElement | HTMLTextAreaElement);
|
||||
},
|
||||
value,
|
||||
style: {
|
||||
width: fullWidth ? '100%' : undefined,
|
||||
whiteSpace: wrap ? 'pre-wrap' : 'pre',
|
||||
display: fullWidth ? 'block' : 'inline-block',
|
||||
},
|
||||
onChange: (e) => {
|
||||
if (onChange) onChange(e);
|
||||
},
|
||||
onFocus,
|
||||
onBlur,
|
||||
onKeyDown: (e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Enter' && (!multiline || e.ctrlKey)) {
|
||||
if (onSubmit) onSubmit(e as any);
|
||||
} else if (e.key === 'Escape') {
|
||||
if (onCancel) onCancel(e as any);
|
||||
} else if (e.key === 'Tab') {
|
||||
if (onTab) onTab(e as any);
|
||||
}
|
||||
if (onKeyDown) onKeyDown(e as any);
|
||||
},
|
||||
};
|
||||
|
||||
const input = multiline ? (
|
||||
<textarea css={inputCss} {...attr} />
|
||||
) : (
|
||||
<input css={inputCss} {...attr} />
|
||||
);
|
||||
|
||||
const style: React.CSSProperties = {
|
||||
display: fullWidth ? 'block' : 'inline-block',
|
||||
width: fullWidth ? '100%' : undefined,
|
||||
overflowX: fullWidth ? 'auto' : undefined,
|
||||
whiteSpace: wrap ? 'pre-wrap' : 'pre',
|
||||
};
|
||||
|
||||
return (
|
||||
<span css={blockCss} style={style}>
|
||||
{input}
|
||||
<span
|
||||
ref={sizerRef}
|
||||
css={sizerCss}
|
||||
style={{ width: fullWidth ? '100%' : undefined, whiteSpace: wrap ? 'pre-wrap' : 'pre' }}
|
||||
>
|
||||
<span ref={sizerValueRef} style={{ visibility: 'hidden' }} />
|
||||
{'\u200b'}
|
||||
{!!typeahead && <span style={{ opacity: 0.7 }}>{typeahead}</span>}
|
||||
</span>
|
||||
</span>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,14 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

export const copyStyles = (from: HTMLElement, to: HTMLElement, which: string[]): void => {
  const styles = window.getComputedStyle(from);
  if (!styles) return;
  for (const property of which) (to.style as any)[property] = (styles as any)[property];
};
@ -0,0 +1,10 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

export { PrettyPrint, type PrettyPrintProps } from './pretty_print';
@ -0,0 +1,35 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { parse, WrappingPrettyPrinter, WrappingPrettyPrinterOptions } from '@kbn/esql-ast';
import * as React from 'react';
import { EuiCodeBlock } from '@elastic/eui';

export interface PrettyPrintProps {
  src: string;
  opts?: WrappingPrettyPrinterOptions;
}

export const PrettyPrint: React.FC<PrettyPrintProps> = ({ src, opts }) => {
  const formatted = React.useMemo(() => {
    try {
      const { root } = parse(src, { withFormatting: true });

      return WrappingPrettyPrinter.print(root, opts);
    } catch {
      return '';
    }
  }, [src, opts]);

  return (
    <EuiCodeBlock language="esql" fontSize="m" paddingSize="m">
      {formatted}
    </EuiCodeBlock>
  );
};
@ -0,0 +1,10 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

export { PrettyPrintBasic, type PrettyPrintBasicProps } from './pretty_print';
@ -0,0 +1,35 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { parse, BasicPrettyPrinter, BasicPrettyPrinterOptions } from '@kbn/esql-ast';
import * as React from 'react';
import { EuiCodeBlock } from '@elastic/eui';

export interface PrettyPrintBasicProps {
  src: string;
  opts?: BasicPrettyPrinterOptions;
}

export const PrettyPrintBasic: React.FC<PrettyPrintBasicProps> = ({ src, opts }) => {
  const formatted = React.useMemo(() => {
    try {
      const { root } = parse(src, { withFormatting: true });

      return BasicPrettyPrinter.print(root, opts);
    } catch {
      return '';
    }
  }, [src, opts]);

  return (
    <EuiCodeBlock language="esql" fontSize="m" paddingSize="m">
      {formatted}
    </EuiCodeBlock>
  );
};
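Putting the two components side by side makes the difference visible: both parse the same source with comments preserved, then render it with the wrapping printer and the one-line printer respectively. A small illustrative sketch; the query string and the relative import paths are assumptions, not part of the plugin.

```tsx
import * as React from 'react';
import { PrettyPrint } from '../pretty_print';
import { PrettyPrintBasic } from '../pretty_print_basic';

const src = 'FROM index /* keep comments */ | STATS COUNT() BY type | LIMIT 10';

// Multi-line, wrapping output vs. single-line output of the same query.
export const PrintComparison: React.FC = () => (
  <>
    <PrettyPrint src={src} />
    <PrettyPrintBasic src={src} />
  </>
);
```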
@ -0,0 +1,26 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { useEffect, useRef, useState } from 'react';
import type { BehaviorSubject } from 'rxjs';

export const useBehaviorSubject = <T>(subject: BehaviorSubject<T>): T => {
  const ref = useRef<T>(subject.getValue());
  const [, setCnt] = useState(0);

  useEffect(() => {
    const subscription = subject.subscribe((value) => {
      ref.current = value;
      setCnt((prev) => prev + 1);
    });
    return () => subscription.unsubscribe();
  }, [subject]);

  return ref.current;
};
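A minimal sketch of how a component could consume an RxJS subject through this hook. The subject, component name, and the `./use_behavior_subject` import path are illustrative assumptions.

```tsx
import * as React from 'react';
import { BehaviorSubject } from 'rxjs';
import { useBehaviorSubject } from './use_behavior_subject';

// Illustrative subject holding the current query source text.
const src$ = new BehaviorSubject<string>('FROM index | LIMIT 10');

export const SrcPreview: React.FC = () => {
  // Re-renders whenever `src$` emits a new value.
  const src = useBehaviorSubject(src$);

  return <pre>{src}</pre>;
};
```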
@ -23,7 +23,7 @@ export const mount =
  const reactElement = (
    <i18nCore.Context>
      <App core={core} plugins={plugins} />
      <App />
    </i18nCore.Context>
  );
@ -20,6 +20,7 @@ export type {
  ESQLLocation,
  ESQLMessage,
  ESQLSingleAstItem,
  ESQLAstQueryExpression,
  ESQLSource,
  ESQLColumn,
  ESQLLiteral,

@ -29,18 +30,29 @@ export type {
  ESQLAstNode,
} from './src/types';

// Low level functions to parse grammar
export { getParser, getLexer, ROOT_STATEMENT } from './src/antlr_facade';
export { Builder, type AstNodeParserFields, type AstNodeTemplate } from './src/builder';

/**
 * ES|QL Query string -> AST data structure
 * this is the foundational building block for any advanced feature
 * a developer wants to build on top of the ESQL language
 **/
export { getAstAndSyntaxErrors } from './src/ast_parser';

export { ESQLErrorListener } from './src/antlr_error_listener';
export {
  getParser,
  createParser,
  getLexer,
  parse,
  parseErrors,
  type ParseOptions,
  type ParseResult,
  getAstAndSyntaxErrors,
  ESQLErrorListener,
} from './src/parser';

export { Walker, type WalkerOptions, walk } from './src/walker';

export { BasicPrettyPrinter } from './src/pretty_print/basic_pretty_printer';
export {
  LeafPrinter,
  BasicPrettyPrinter,
  type BasicPrettyPrinterMultilineOptions,
  type BasicPrettyPrinterOptions,
  WrappingPrettyPrinter,
  type WrappingPrettyPrinterOptions,
} from './src/pretty_print';

export { EsqlQuery } from './src/query';
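The reshuffled exports above are what the rest of this change builds on. A hedged round-trip sketch using only symbols from the new export list; the query string is made up and the exact printer output is not asserted.

```ts
import { parse, BasicPrettyPrinter, WrappingPrettyPrinter, Walker } from '@kbn/esql-ast';

const src = 'FROM index /* a comment */ | STATS COUNT() BY type | LIMIT 10';

// Parse with comment collection enabled, so decorations survive printing.
const { root } = parse(src, { withFormatting: true });

// One-line output; only left/right inline comments are preserved here.
console.log(BasicPrettyPrinter.print(root));

// Wrapping output; all comment placements are supported.
console.log(WrappingPrettyPrinter.print(root));

// The Walker can also visit the comments collected from the source.
Walker.visitComments(root, (comment) => console.log(comment.text));
```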
@ -1,46 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { CommonTokenStream, type CharStream, type ErrorListener } from 'antlr4';
|
||||
|
||||
import { default as ESQLLexer } from './antlr/esql_lexer';
|
||||
import { default as ESQLParser } from './antlr/esql_parser';
|
||||
import { default as ESQLParserListener } from './antlr/esql_parser_listener';
|
||||
|
||||
export const ROOT_STATEMENT = 'singleStatement';
|
||||
|
||||
export const getParser = (
|
||||
inputStream: CharStream,
|
||||
errorListener: ErrorListener<any>,
|
||||
parseListener?: ESQLParserListener
|
||||
) => {
|
||||
const lexer = getLexer(inputStream, errorListener);
|
||||
const tokenStream = new CommonTokenStream(lexer);
|
||||
const parser = new ESQLParser(tokenStream);
|
||||
|
||||
parser.removeErrorListeners();
|
||||
parser.addErrorListener(errorListener);
|
||||
|
||||
if (parseListener) {
|
||||
// @ts-expect-error the addParseListener API does exist and is documented here
|
||||
// https://github.com/antlr/antlr4/blob/dev/doc/listeners.md
|
||||
parser.addParseListener(parseListener);
|
||||
}
|
||||
|
||||
return parser;
|
||||
};
|
||||
|
||||
export const getLexer = (inputStream: CharStream, errorListener: ErrorListener<any>) => {
|
||||
const lexer = new ESQLLexer(inputStream);
|
||||
|
||||
lexer.removeErrorListeners();
|
||||
lexer.addErrorListener(errorListener);
|
||||
|
||||
return lexer;
|
||||
};
|
|
@ -1,39 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { CharStreams } from 'antlr4';
|
||||
import { ESQLErrorListener } from './antlr_error_listener';
|
||||
import { getParser, ROOT_STATEMENT } from './antlr_facade';
|
||||
import { AstListener } from './ast_factory';
|
||||
import type { ESQLAst, EditorError } from './types';
|
||||
|
||||
// These will need to be manually updated whenever the relevant grammar changes.
|
||||
const SYNTAX_ERRORS_TO_IGNORE = [
|
||||
`SyntaxError: mismatched input '<EOF>' expecting {'explain', 'from', 'meta', 'row', 'show'}`,
|
||||
];
|
||||
|
||||
export function getAstAndSyntaxErrors(text: string | undefined): {
|
||||
errors: EditorError[];
|
||||
ast: ESQLAst;
|
||||
} {
|
||||
if (text == null) {
|
||||
return { ast: [], errors: [] };
|
||||
}
|
||||
const errorListener = new ESQLErrorListener();
|
||||
const parseListener = new AstListener();
|
||||
const parser = getParser(CharStreams.fromString(text), errorListener, parseListener);
|
||||
|
||||
parser[ROOT_STATEMENT]();
|
||||
|
||||
const errors = errorListener.getErrors().filter((error) => {
|
||||
return !SYNTAX_ERRORS_TO_IGNORE.includes(error.message);
|
||||
});
|
||||
|
||||
return { ...parseListener.getAst(), errors };
|
||||
}
|
|
@ -10,7 +10,7 @@
import { Builder } from '.';

test('can mint a numeric literal', () => {
  const node = Builder.numericLiteral({ value: 42 });
  const node = Builder.expression.literal.numeric({ value: 42, literalType: 'integer' });

  expect(node).toMatchObject({
    type: 'literal',
packages/kbn-esql-ast/src/builder/builder.ts (new file, 131 lines)
@ -0,0 +1,131 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

/* eslint-disable @typescript-eslint/no-namespace */

import {
  ESQLAstComment,
  ESQLAstQueryExpression,
  ESQLCommand,
  ESQLDecimalLiteral,
  ESQLInlineCast,
  ESQLIntegerLiteral,
  ESQLList,
  ESQLLocation,
  ESQLSource,
} from '../types';
import { AstNodeParserFields, AstNodeTemplate } from './types';

export namespace Builder {
  /**
   * Constructs fields which are only available when the node is minted by
   * the parser.
   */
  export const parserFields = ({
    location = { min: 0, max: 0 },
    text = '',
    incomplete = false,
  }: Partial<AstNodeParserFields> = {}): AstNodeParserFields => ({
    location,
    text,
    incomplete,
  });

  export const command = (
    template: AstNodeTemplate<ESQLCommand>,
    fromParser?: Partial<AstNodeParserFields>
  ): ESQLCommand => {
    return {
      ...template,
      ...Builder.parserFields(fromParser),
      type: 'command',
    };
  };

  export const comment = (
    subtype: ESQLAstComment['subtype'],
    text: string,
    location: ESQLLocation
  ): ESQLAstComment => {
    return {
      type: 'comment',
      subtype,
      text,
      location,
    };
  };

  export namespace expression {
    export const query = (
      commands: ESQLAstQueryExpression['commands'] = [],
      fromParser?: Partial<AstNodeParserFields>
    ): ESQLAstQueryExpression => {
      return {
        ...Builder.parserFields(fromParser),
        commands,
        type: 'query',
        name: '',
      };
    };

    export const source = (
      template: AstNodeTemplate<ESQLSource>,
      fromParser?: Partial<AstNodeParserFields>
    ): ESQLSource => {
      return {
        ...template,
        ...Builder.parserFields(fromParser),
        type: 'source',
      };
    };

    export const inlineCast = (
      template: Omit<AstNodeTemplate<ESQLInlineCast>, 'name'>,
      fromParser?: Partial<AstNodeParserFields>
    ): ESQLInlineCast => {
      return {
        ...template,
        ...Builder.parserFields(fromParser),
        type: 'inlineCast',
        name: '',
      };
    };

    export namespace literal {
      /**
       * Constructs an integer literal node.
       */
      export const numeric = (
        template: Omit<AstNodeTemplate<ESQLIntegerLiteral | ESQLDecimalLiteral>, 'name'>,
        fromParser?: Partial<AstNodeParserFields>
      ): ESQLIntegerLiteral | ESQLDecimalLiteral => {
        const node: ESQLIntegerLiteral | ESQLDecimalLiteral = {
          ...template,
          ...Builder.parserFields(fromParser),
          type: 'literal',
          name: template.value.toString(),
        };

        return node;
      };

      export const list = (
        template: Omit<AstNodeTemplate<ESQLList>, 'name'>,
        fromParser?: Partial<AstNodeParserFields>
      ): ESQLList => {
        return {
          ...template,
          ...Builder.parserFields(fromParser),
          type: 'list',
          name: '',
        };
      };
    }
  }
}
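A short sketch of how these stateless factories compose, based only on the signatures above and the updated unit test. The exact required fields of `AstNodeTemplate` (here `name` and `args` for a command) are an assumption for illustration.

```ts
import { Builder } from './builder';

// Mint a literal the same way the updated unit test does.
const literal = Builder.expression.literal.numeric({ value: 42, literalType: 'integer' });

// Wrap it into a LIMIT command and a query root node (template fields assumed).
const limit = Builder.command({ name: 'limit', args: [literal] });
const query = Builder.expression.query([limit]);

console.log(query.type); // 'query'
console.log(query.commands[0].name); // 'limit'
```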
@ -7,42 +7,5 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { ESQLDecimalLiteral, ESQLIntegerLiteral, ESQLNumericLiteralType } from '../types';
|
||||
import { AstNodeParserFields, AstNodeTemplate } from './types';
|
||||
|
||||
export class Builder {
|
||||
/**
|
||||
* Constructs fields which are only available when the node is minted by
|
||||
* the parser.
|
||||
*/
|
||||
public static readonly parserFields = ({
|
||||
location = { min: 0, max: 0 },
|
||||
text = '',
|
||||
incomplete = false,
|
||||
}: Partial<AstNodeParserFields>): AstNodeParserFields => ({
|
||||
location,
|
||||
text,
|
||||
incomplete,
|
||||
});
|
||||
|
||||
/**
|
||||
* Constructs a integer literal node.
|
||||
*/
|
||||
public static readonly numericLiteral = (
|
||||
template: Omit<
|
||||
AstNodeTemplate<ESQLIntegerLiteral | ESQLDecimalLiteral>,
|
||||
'literalType' | 'name'
|
||||
>,
|
||||
type: ESQLNumericLiteralType = 'integer'
|
||||
): ESQLIntegerLiteral | ESQLDecimalLiteral => {
|
||||
const node: ESQLIntegerLiteral | ESQLDecimalLiteral = {
|
||||
...template,
|
||||
...Builder.parserFields(template),
|
||||
type: 'literal',
|
||||
literalType: type,
|
||||
name: template.value.toString(),
|
||||
};
|
||||
|
||||
return node;
|
||||
};
|
||||
}
|
||||
export type * from './types';
|
||||
export { Builder } from './builder';
|
||||
|
|
packages/kbn-esql-ast/src/parser/README.md (new file, 27 lines)
@ -0,0 +1,27 @@
## Comments

### Inter-node comment places

Around the colon in a source identifier:

```esql
FROM cluster /* comment */ : index
```

Around dots in a column identifier:

```esql
KEEP column /* comment */ . subcolumn
```

Cast expressions:

```esql
STATS "abc":: /* asdf */ integer
```

Time interval expressions:

```esql
STATS 1 /* asdf */ DAY
```
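To make the decoration model concrete, a hedged sketch of what the parser attaches for two of the placements exercised by the comment tests below; the query text is made up, and the shapes follow the expectations in those tests.

```ts
import { parse } from '@kbn/esql-ast';

const text = `
// Good source
FROM abc /* right of "abc" */
| LIMIT 10`;

const { root } = parse(text, { withFormatting: true });

// The single-line comment becomes a "top" decoration of the FROM command;
// the multi-line comment becomes a "right" decoration of the "abc" source node.
console.log(root.commands[0].formatting?.top);
console.log(root.commands[0].args[0]);
```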
@ -7,7 +7,7 @@
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { getAstAndSyntaxErrors as parse } from '../ast_parser';
import { parse } from '../parser';

describe('source nodes', () => {
  it('cluster vs quoted source', () => {

@ -7,7 +7,7 @@
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { getAstAndSyntaxErrors as parse } from '../ast_parser';
import { getAstAndSyntaxErrors as parse } from '..';

describe('Column Identifier Expressions', () => {
  it('can parse un-quoted identifiers', () => {

@ -7,7 +7,7 @@
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { getAstAndSyntaxErrors as parse } from '../ast_parser';
import { getAstAndSyntaxErrors as parse } from '..';

describe('commands', () => {
  describe('correctly formatted, basic usage', () => {
packages/kbn-esql-ast/src/parser/__tests__/comments.test.ts (new file, 940 lines)
@ -0,0 +1,940 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { parse } from '..';
|
||||
|
||||
describe('Comments', () => {
|
||||
describe('can attach "top" comment(s)', () => {
|
||||
it('to a single command', () => {
|
||||
const text = `
|
||||
//comment
|
||||
FROM index`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[0]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: 'comment',
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('to the second command', () => {
|
||||
const text = `
|
||||
FROM abc
|
||||
|
||||
// Good limit
|
||||
| LIMIT 10`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{},
|
||||
{
|
||||
type: 'command',
|
||||
name: 'limit',
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' Good limit',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to a command (multiline)', () => {
|
||||
const text = `
|
||||
FROM abc
|
||||
|
||||
/* Good limit */
|
||||
| LIMIT 10`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{},
|
||||
{
|
||||
type: 'command',
|
||||
name: 'limit',
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' Good limit ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to a command (multiple comments)', () => {
|
||||
const text = `
|
||||
FROM abc
|
||||
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
|
||||
| LIMIT 10`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{},
|
||||
{
|
||||
type: 'command',
|
||||
name: 'limit',
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 1 ',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' 2',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 3 ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to an expression', () => {
|
||||
const text = `
|
||||
FROM
|
||||
|
||||
// "abc" is the best source
|
||||
abc`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
args: [
|
||||
{
|
||||
type: 'source',
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' "abc" is the best source',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to an expression (multiple comments)', () => {
|
||||
const text = `
|
||||
FROM
|
||||
// "abc" is the best source
|
||||
/* another comment */ /* one more */
|
||||
abc`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
args: [
|
||||
{
|
||||
type: 'source',
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' "abc" is the best source',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' another comment ',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' one more ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to a nested expression', () => {
|
||||
const text = `
|
||||
FROM a
|
||||
| STATS 1 +
|
||||
// 2 is the best number
|
||||
2`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{},
|
||||
{
|
||||
type: 'command',
|
||||
name: 'stats',
|
||||
args: [
|
||||
{
|
||||
type: 'function',
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' 2 is the best number',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to first binary expression operand', () => {
|
||||
const text = `
|
||||
ROW
|
||||
|
||||
// 1
|
||||
1 +
|
||||
2`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[0]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'row',
|
||||
args: [
|
||||
{
|
||||
type: 'function',
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' 1',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('to first binary expression operand, nested in function', () => {
|
||||
const text = `
|
||||
ROW fn(
|
||||
|
||||
// 1
|
||||
1 +
|
||||
2
|
||||
)`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[0]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'row',
|
||||
args: [
|
||||
{
|
||||
type: 'function',
|
||||
name: 'fn',
|
||||
args: [
|
||||
{
|
||||
type: 'function',
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' 1',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('to second binary expression operand', () => {
|
||||
const text = `
|
||||
ROW
|
||||
1 +
|
||||
|
||||
// 2
|
||||
2`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[0]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'row',
|
||||
args: [
|
||||
{
|
||||
type: 'function',
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' 2',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('to second binary expression operand, nested in function', () => {
|
||||
const text = `
|
||||
ROW fn(
|
||||
1 +
|
||||
|
||||
// 2
|
||||
2
|
||||
)`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[0]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'row',
|
||||
args: [
|
||||
{
|
||||
type: 'function',
|
||||
name: 'fn',
|
||||
args: [
|
||||
{
|
||||
type: 'function',
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' 2',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('to a function', () => {
|
||||
const text = `
|
||||
ROW
|
||||
// fn comment
|
||||
fn(0)`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[0]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'row',
|
||||
args: [
|
||||
{
|
||||
type: 'function',
|
||||
name: 'fn',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 0,
|
||||
},
|
||||
],
|
||||
formatting: {
|
||||
top: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' fn comment',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('can attach "left" comment(s)', () => {
|
||||
it('to a command', () => {
|
||||
const text = `/* hello */ FROM abc`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
formatting: {
|
||||
left: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' hello ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to an expression, multiple comments', () => {
|
||||
const text = `FROM /* aha */ source, /* 1 */ /*2*/ /* 3 */ abc`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
args: [
|
||||
{
|
||||
type: 'source',
|
||||
name: 'source',
|
||||
formatting: {
|
||||
left: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' aha ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'source',
|
||||
name: 'abc',
|
||||
formatting: {
|
||||
left: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 1 ',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: '2',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 3 ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to sub-expression', () => {
|
||||
const text = `FROM index | STATS 1 + /* aha */ 2`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{},
|
||||
{
|
||||
type: 'command',
|
||||
name: 'stats',
|
||||
args: [
|
||||
{
|
||||
name: '+',
|
||||
args: [
|
||||
{},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
formatting: {
|
||||
left: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' aha ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('can attach "right" comment(s)', () => {
|
||||
it('to an expression', () => {
|
||||
const text = `FROM abc /* hello */`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
args: [
|
||||
{
|
||||
type: 'source',
|
||||
name: 'abc',
|
||||
formatting: {
|
||||
right: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' hello ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to an expression, multiple comments', () => {
|
||||
const text = `FROM abc /* a */ /* b */, def /* c */`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
args: [
|
||||
{
|
||||
type: 'source',
|
||||
name: 'abc',
|
||||
formatting: {
|
||||
right: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' a ',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' b ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'source',
|
||||
name: 'def',
|
||||
formatting: {
|
||||
right: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' c ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to a nested expression', () => {
|
||||
const text = `FROM a | STATS 1 + 2 /* hello */`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[1]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'stats',
|
||||
args: [
|
||||
{
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
formatting: {
|
||||
right: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' hello ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('to a nested expression - 2', () => {
|
||||
const text = `FROM a | STATS 1 + 2 /* 2 */ + 3`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[1]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'stats',
|
||||
args: [
|
||||
{
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
formatting: {
|
||||
right: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 2 ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 3,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('to a nested expression - 3', () => {
|
||||
const text = `FROM a | STATS 1 /* 1 */ + 2 /* 2.1 */ /* 2.2 */ /* 2.3 */ + 3 /* 3.1 */ /* 3.2 */`;
|
||||
const { root } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(root.commands[1]).toMatchObject({
|
||||
type: 'command',
|
||||
name: 'stats',
|
||||
args: [
|
||||
{
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
formatting: {
|
||||
right: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 1 ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
formatting: {
|
||||
right: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 2.1 ',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 2.2 ',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 2.3 ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 3,
|
||||
formatting: {
|
||||
right: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 3.1 ',
|
||||
},
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'multi-line',
|
||||
text: ' 3.2 ',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('can attach "right end" comments', () => {
|
||||
it('to an expression', () => {
|
||||
const text = `FROM abc // hello`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
args: [
|
||||
{
|
||||
type: 'source',
|
||||
name: 'abc',
|
||||
formatting: {
|
||||
rightSingleLine: {
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' hello',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to the second expression', () => {
|
||||
const text = `FROM a1, a2 // hello world
|
||||
| LIMIT 1`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{
|
||||
type: 'command',
|
||||
name: 'from',
|
||||
args: [
|
||||
{},
|
||||
{
|
||||
type: 'source',
|
||||
name: 'a2',
|
||||
formatting: {
|
||||
rightSingleLine: {
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' hello world',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to nested expression', () => {
|
||||
const text = `
|
||||
FROM a
|
||||
| STATS 1 + 2 // hello world
|
||||
`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{},
|
||||
{
|
||||
type: 'command',
|
||||
name: 'stats',
|
||||
args: [
|
||||
{
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
formatting: {
|
||||
rightSingleLine: {
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' hello world',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('to nested expression - 2', () => {
|
||||
const text = `
|
||||
FROM a
|
||||
| STATS 1 // The 1 is important
|
||||
+ 2
|
||||
`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{},
|
||||
{
|
||||
type: 'command',
|
||||
name: 'stats',
|
||||
args: [
|
||||
{
|
||||
name: '+',
|
||||
args: [
|
||||
{
|
||||
type: 'literal',
|
||||
value: 1,
|
||||
formatting: {
|
||||
rightSingleLine: {
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' The 1 is important',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'literal',
|
||||
value: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('can attach "bottom" comment(s)', () => {
|
||||
it('attaches comment at the end of the program to the last command node from the "bottom"', () => {
|
||||
const text = `
|
||||
FROM a
|
||||
| LIMIT 1
|
||||
// the end
|
||||
`;
|
||||
const { ast } = parse(text, { withFormatting: true });
|
||||
|
||||
expect(ast).toMatchObject([
|
||||
{},
|
||||
{
|
||||
type: 'command',
|
||||
name: 'limit',
|
||||
formatting: {
|
||||
bottom: [
|
||||
{
|
||||
type: 'comment',
|
||||
subtype: 'single-line',
|
||||
text: ' the end',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -7,7 +7,7 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
|
||||
describe('FROM', () => {
|
||||
describe('correctly formatted', () => {
|
|
@ -7,8 +7,8 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { Walker } from '../walker';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
import { Walker } from '../../walker';
|
||||
|
||||
describe('function AST nodes', () => {
|
||||
describe('"variadic-call"', () => {
|
|
@ -7,8 +7,8 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { ESQLFunction, ESQLInlineCast, ESQLSingleAstItem } from '../types';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
import { ESQLFunction, ESQLInlineCast, ESQLSingleAstItem } from '../../types';
|
||||
|
||||
describe('Inline cast (::)', () => {
|
||||
describe('correctly formatted', () => {
|
||||
|
@ -20,7 +20,7 @@ describe('Inline cast (::)', () => {
|
|||
expect(ast[1].args[0]).toEqual(
|
||||
expect.objectContaining({
|
||||
castType: 'string',
|
||||
name: 'inlineCast',
|
||||
name: '',
|
||||
type: 'inlineCast',
|
||||
value: expect.objectContaining({
|
||||
name: 'field',
|
||||
|
@ -38,7 +38,7 @@ describe('Inline cast (::)', () => {
|
|||
expect((ast[1].args[0] as ESQLFunction).args[0]).toEqual(
|
||||
expect.objectContaining({
|
||||
castType: 'long',
|
||||
name: 'inlineCast',
|
||||
name: '',
|
||||
type: 'inlineCast',
|
||||
value: expect.objectContaining({
|
||||
name: 'field',
|
|
@ -7,8 +7,8 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { ESQLLiteral } from '../types';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
import { ESQLLiteral } from '../../types';
|
||||
|
||||
describe('literal expression', () => {
|
||||
it('numeric expression captures "value", and "name" fields', () => {
|
|
@ -7,7 +7,7 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
|
||||
describe('METRICS', () => {
|
||||
describe('correctly formatted', () => {
|
|
@ -7,8 +7,8 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { Walker } from '../walker';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
import { Walker } from '../../walker';
|
||||
|
||||
/**
|
||||
* Un-named parameters are represented by a question mark "?".
|
|
@ -7,7 +7,7 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
|
||||
describe('RENAME', () => {
|
||||
/**
|
|
@ -7,7 +7,7 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
|
||||
describe('SORT', () => {
|
||||
describe('correctly formatted', () => {
|
|
@ -7,7 +7,7 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors as parse } from '../ast_parser';
|
||||
import { getAstAndSyntaxErrors as parse } from '..';
|
||||
|
||||
describe('WHERE', () => {
|
||||
describe('correctly formatted', () => {
|
|
@@ -7,6 +7,13 @@
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { Token } from 'antlr4';

/**
 * The root ANTLR rule to start parsing from.
 */
export const GRAMMAR_ROOT_RULE = 'singleStatement';

export const EDITOR_MARKER = 'marker_esql_editor';

export const TICKS_REGEX = /^`{1}|`{1}$/g;

@@ -14,3 +21,6 @@ export const DOUBLE_TICKS_REGEX = /``/g;
export const SINGLE_TICK_REGEX = /`/g;
export const SINGLE_BACKTICK = '`';
export const DOUBLE_BACKTICK = '``';

export const DEFAULT_CHANNEL: number = +(Token as any).DEFAULT_CHANNEL;
export const HIDDEN_CHANNEL: number = +(Token as any).HIDDEN_CHANNEL;
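The two channel constants above are what the comment collection keys on: regular query tokens live on the ANTLR default channel, while comments and whitespace are routed to the hidden channel. A minimal sketch of how they can be used to pull the raw comment tokens out of a parse result (the `parse` import path and the sample query are illustrative assumptions):

```ts
import type { Token } from 'antlr4';
import { HIDDEN_CHANNEL } from './constants';
import { parse } from '.'; // entry point of the parser folder, path assumed

// Comments and whitespace never reach the parser; they stay on the hidden channel.
const hiddenTokens = (tokens: Token[]): Token[] =>
  tokens.filter((token) => token.channel === HIDDEN_CHANNEL);

const { tokens } = parse('FROM index /* main source */ | LIMIT 10');
for (const token of hiddenTokens(tokens)) {
  console.log(JSON.stringify(token.text)); // whitespace runs and "/* main source */"
}
```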
@ -32,8 +32,8 @@ import {
|
|||
type MetricsCommandContext,
|
||||
IndexPatternContext,
|
||||
InlinestatsCommandContext,
|
||||
} from './antlr/esql_parser';
|
||||
import { default as ESQLParserListener } from './antlr/esql_parser_listener';
|
||||
} from '../antlr/esql_parser';
|
||||
import { default as ESQLParserListener } from '../antlr/esql_parser_listener';
|
||||
import {
|
||||
createCommand,
|
||||
createFunction,
|
||||
|
@ -42,8 +42,8 @@ import {
|
|||
textExistsAndIsValid,
|
||||
createSource,
|
||||
createAstBaseItem,
|
||||
} from './ast_helpers';
|
||||
import { getPosition } from './ast_position_utils';
|
||||
} from './factories';
|
||||
import { getPosition } from './helpers';
|
||||
import {
|
||||
collectAllSourceIdentifiers,
|
||||
collectAllFields,
|
||||
|
@ -57,10 +57,10 @@ import {
|
|||
getPolicyName,
|
||||
getMatchField,
|
||||
getEnrichClauses,
|
||||
} from './ast_walker';
|
||||
import type { ESQLAst, ESQLAstMetricsCommand } from './types';
|
||||
} from './walkers';
|
||||
import type { ESQLAst, ESQLAstMetricsCommand } from '../types';
|
||||
|
||||
export class AstListener implements ESQLParserListener {
|
||||
export class ESQLAstBuilderListener implements ESQLParserListener {
|
||||
private ast: ESQLAst = [];
|
||||
|
||||
public getAst() {
|
|
@ -9,8 +9,8 @@
|
|||
|
||||
import type { Recognizer, RecognitionException } from 'antlr4';
|
||||
import { ErrorListener } from 'antlr4';
|
||||
import type { EditorError } from './types';
|
||||
import { getPosition } from './ast_position_utils';
|
||||
import { getPosition } from './helpers';
|
||||
import type { EditorError } from '../types';
|
||||
|
||||
const REPLACE_DEV = /,{0,1}(?<!\s)\s*DEV_\w+\s*/g;
|
||||
export class ESQLErrorListener extends ErrorListener<any> {
|
|
@ -11,7 +11,7 @@
|
|||
* In case of changes in the grammar, this script should be updated: esql_update_ast_script.js
|
||||
*/
|
||||
|
||||
import { type Token, type ParserRuleContext, type TerminalNode } from 'antlr4';
|
||||
import type { Token, ParserRuleContext, TerminalNode, RecognitionException } from 'antlr4';
|
||||
import {
|
||||
IndexPatternContext,
|
||||
QualifiedNameContext,
|
||||
|
@ -21,12 +21,10 @@ import {
|
|||
type IntegerValueContext,
|
||||
type QualifiedIntegerLiteralContext,
|
||||
QualifiedNamePatternContext,
|
||||
} from './antlr/esql_parser';
|
||||
import { getPosition } from './ast_position_utils';
|
||||
} from '../antlr/esql_parser';
|
||||
import { DOUBLE_TICKS_REGEX, SINGLE_BACKTICK, TICKS_REGEX } from './constants';
|
||||
import type {
|
||||
ESQLAstBaseItem,
|
||||
ESQLCommand,
|
||||
ESQLLiteral,
|
||||
ESQLList,
|
||||
ESQLTimeInterval,
|
||||
|
@ -43,8 +41,9 @@ import type {
|
|||
FunctionSubtype,
|
||||
ESQLNumericLiteral,
|
||||
ESQLOrderExpression,
|
||||
} from './types';
|
||||
import { parseIdentifier } from './parser/helpers';
|
||||
} from '../types';
|
||||
import { parseIdentifier, getPosition } from './helpers';
|
||||
import { Builder, type AstNodeParserFields } from '../builder';
|
||||
|
||||
export function nonNullable<T>(v: T): v is NonNullable<T> {
|
||||
return v != null;
|
||||
|
@@ -62,54 +61,32 @@ export function createAstBaseItem<Name = string>(
  };
}

-export function createCommand(name: string, ctx: ParserRuleContext): ESQLCommand {
-  return {
-    type: 'command',
-    name,
-    text: ctx.getText(),
-    args: [],
-    location: getPosition(ctx.start, ctx.stop),
-    incomplete: Boolean(ctx.exception),
-  };
-}
+const createParserFields = (ctx: ParserRuleContext): AstNodeParserFields => ({
+  text: ctx.getText(),
+  location: getPosition(ctx.start, ctx.stop),
+  incomplete: Boolean(ctx.exception),
+});
+
+export const createCommand = (name: string, ctx: ParserRuleContext) =>
+  Builder.command({ name, args: [] }, createParserFields(ctx));

-export function createInlineCast(ctx: InlineCastContext): Omit<ESQLInlineCast, 'value'> {
-  return {
-    type: 'inlineCast',
-    name: 'inlineCast',
-    text: ctx.getText(),
-    castType: ctx.dataType().getText(),
-    location: getPosition(ctx.start, ctx.stop),
-    incomplete: Boolean(ctx.exception),
-  };
-}
+export const createInlineCast = (ctx: InlineCastContext, value: ESQLInlineCast['value']) =>
+  Builder.expression.inlineCast(
+    { castType: ctx.dataType().getText(), value },
+    createParserFields(ctx)
+  );

-export function createList(ctx: ParserRuleContext, values: ESQLLiteral[]): ESQLList {
-  return {
-    type: 'list',
-    name: ctx.getText(),
-    values,
-    text: ctx.getText(),
-    location: getPosition(ctx.start, ctx.stop),
-    incomplete: Boolean(ctx.exception),
-  };
-}
+export const createList = (ctx: ParserRuleContext, values: ESQLLiteral[]): ESQLList =>
+  Builder.expression.literal.list({ values }, createParserFields(ctx));

-export function createNumericLiteral(
+export const createNumericLiteral = (
   ctx: DecimalValueContext | IntegerValueContext,
   literalType: ESQLNumericLiteralType
-): ESQLLiteral {
-  const text = ctx.getText();
-  return {
-    type: 'literal',
-    literalType,
-    text,
-    name: text,
-    value: Number(text),
-    location: getPosition(ctx.start, ctx.stop),
-    incomplete: Boolean(ctx.exception),
-  };
-}
+): ESQLLiteral =>
+  Builder.expression.literal.numeric(
+    { value: Number(ctx.getText()), literalType },
+    createParserFields(ctx)
+  );

 export function createFakeMultiplyLiteral(
   ctx: ArithmeticUnaryContext,
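The refactor above replaces hand-rolled object literals with `Builder` factories plus a shared `createParserFields` helper, so every node picks up `text`, `location`, and `incomplete` the same way. A simplified sketch of the pattern with assumed node shapes (not the real kbn-esql-ast types):

```ts
// Illustrative only: the field and node shapes below are assumptions.
interface ParserFields {
  text: string;
  location: { min: number; max: number };
  incomplete: boolean;
}

interface TimeIntervalNode extends ParserFields {
  type: 'timeInterval';
  quantity: number;
  unit: string;
}

// A new factory only describes what is specific to the node; everything the
// parser knows (text, location, incompleteness) is mixed in uniformly.
const createTimeInterval = (
  quantity: number,
  unit: string,
  fields: ParserFields
): TimeIntervalNode => ({ type: 'timeInterval', quantity, unit, ...fields });
```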
@ -496,3 +473,13 @@ export function createUnknownItem(ctx: ParserRuleContext): ESQLUnknownItem {
|
|||
incomplete: Boolean(ctx.exception),
|
||||
};
|
||||
}
|
||||
|
||||
export function createError(exception: RecognitionException) {
|
||||
const token = exception.offendingToken;
|
||||
|
||||
return {
|
||||
type: 'error' as const,
|
||||
text: `SyntaxError: ${exception.message}`,
|
||||
location: getPosition(token),
|
||||
};
|
||||
}
|
packages/kbn-esql-ast/src/parser/formatting.ts (new file, 270 lines)
|
@ -0,0 +1,270 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { type CommonTokenStream, Token } from 'antlr4';
|
||||
import { Builder } from '../builder';
|
||||
import { Visitor } from '../visitor';
|
||||
import type {
|
||||
ESQLAstComment,
|
||||
ESQLAstCommentMultiLine,
|
||||
ESQLAstCommentSingleLine,
|
||||
ESQLAstNodeFormatting,
|
||||
ESQLAstQueryExpression,
|
||||
ESQLProperNode,
|
||||
} from '../types';
|
||||
import type {
|
||||
ParsedFormattingCommentDecoration,
|
||||
ParsedFormattingDecoration,
|
||||
ParsedFormattingDecorationLines,
|
||||
} from './types';
|
||||
import { HIDDEN_CHANNEL } from './constants';
|
||||
import { findVisibleToken, isLikelyPunctuation } from './helpers';
|
||||
|
||||
const commentSubtype = (text: string): ESQLAstComment['subtype'] | undefined => {
|
||||
if (text[0] === '/') {
|
||||
if (text[1] === '/') {
|
||||
return 'single-line';
|
||||
}
|
||||
if (text[1] === '*') {
|
||||
const end = text.length - 1;
|
||||
if (text[end] === '/' && text[end - 1] === '*') {
|
||||
return 'multi-line';
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const trimRightNewline = (text: string): string => {
|
||||
const last = text.length - 1;
|
||||
if (text[last] === '\n') {
|
||||
return text.slice(0, last);
|
||||
}
|
||||
return text;
|
||||
};
|
||||
|
||||
/**
 * Collects *decorations* (all comments and whitespace of interest) from the
 * token stream.
 *
 * @param tokens Lexer token stream
 * @returns Comments found in the token stream, grouped into decoration lines
 */
export const collectDecorations = (
  tokens: CommonTokenStream
): { comments: ESQLAstComment[]; lines: ParsedFormattingDecorationLines } => {
  const comments: ESQLAstComment[] = [];
  const list = tokens.tokens;
  const length = list.length;
  const lines: ParsedFormattingDecorationLines = [];

  let line: ParsedFormattingDecoration[] = [];
  let pos = 0;
  let hasContentToLeft = false;

  // The last token is the <EOF> token, which we don't need to process.
  for (let i = 0; i < length - 1; i++) {
    const token = list[i];
    const { channel, text } = token;
    const min = pos;
    const max = min + text.length;

    pos = max;

    const isContentToken = channel !== HIDDEN_CHANNEL;

    if (isContentToken) {
      const isPunctuation = isLikelyPunctuation(text);

      if (!isPunctuation) {
        hasContentToLeft = true;
        for (const decoration of line) {
          if (decoration.type === 'comment') {
            decoration.hasContentToRight = true;
          }
        }
        continue;
      }
    }

    const subtype = commentSubtype(text);
    const isComment = !!subtype;

    if (!isComment) {
      const hasLineBreak = text.lastIndexOf('\n') !== -1;

      if (hasLineBreak) {
        lines.push(line);
        line = [];
        hasContentToLeft = false;
      }
      continue;
    }

    const cleanText =
      subtype === 'single-line' ? trimRightNewline(text.slice(2)) : text.slice(2, -2);
    const node = Builder.comment(subtype, cleanText, { min, max });
    const comment: ParsedFormattingCommentDecoration = {
      type: 'comment',
      hasContentToLeft,
      hasContentToRight: false,
      node,
    };

    comments.push(comment.node);
    line.push(comment);

    if (subtype === 'single-line') {
      const hasLineBreak = text[text.length - 1] === '\n';

      if (hasLineBreak) {
        lines.push(line);
        line = [];
        hasContentToLeft = false;
      }
    }
  }

  if (line.length > 0) {
    lines.push(line);
  }

  return { comments, lines };
};
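In practice `collectDecorations` is driven by first running the parser so the token stream is filled, which is exactly what `parse` does when `withFormatting` is set. A sketch of calling it directly; the import paths and the expected output values are assumptions based on the slicing logic above:

```ts
import { createParser } from './parser';        // sibling module, path assumed
import { collectDecorations } from './formatting';
import { GRAMMAR_ROOT_RULE } from './constants';

const src = `FROM index // load the index
| LIMIT 10 /* cap the result */`;

const { parser, tokens } = createParser(src);
// Running the root rule pulls every token, including hidden-channel comments,
// through the stream that collectDecorations() reads afterwards.
parser[GRAMMAR_ROOT_RULE]();

const { comments, lines } = collectDecorations(tokens);
console.log(comments.map((c) => c.text)); // expected: [' load the index', ' cap the result ']
console.log(lines.length);                // decorations grouped per whitespace line
```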
|
||||
|
||||
const attachTopComment = (node: ESQLProperNode, comment: ESQLAstComment) => {
|
||||
const formatting: ESQLAstNodeFormatting = node.formatting || (node.formatting = {});
|
||||
const list = formatting.top || (formatting.top = []);
|
||||
list.push(comment);
|
||||
};
|
||||
|
||||
const attachBottomComment = (node: ESQLProperNode, comment: ESQLAstComment) => {
|
||||
const formatting: ESQLAstNodeFormatting = node.formatting || (node.formatting = {});
|
||||
const list = formatting.bottom || (formatting.bottom = []);
|
||||
list.push(comment);
|
||||
};
|
||||
|
||||
const attachLeftComment = (node: ESQLProperNode, comment: ESQLAstCommentMultiLine) => {
|
||||
const formatting: ESQLAstNodeFormatting = node.formatting || (node.formatting = {});
|
||||
const list = formatting.left || (formatting.left = []);
|
||||
list.push(comment);
|
||||
};
|
||||
|
||||
const attachRightComment = (node: ESQLProperNode, comment: ESQLAstCommentMultiLine) => {
|
||||
const formatting: ESQLAstNodeFormatting = node.formatting || (node.formatting = {});
|
||||
const list = formatting.right || (formatting.right = []);
|
||||
list.push(comment);
|
||||
};
|
||||
|
||||
const attachRightEndComment = (node: ESQLProperNode, comment: ESQLAstCommentSingleLine) => {
|
||||
const formatting: ESQLAstNodeFormatting = node.formatting || (node.formatting = {});
|
||||
formatting.rightSingleLine = comment;
|
||||
};
|
||||
|
||||
const attachCommentDecoration = (
|
||||
ast: ESQLAstQueryExpression,
|
||||
tokens: Token[],
|
||||
comment: ParsedFormattingCommentDecoration
|
||||
) => {
|
||||
const commentConsumesWholeLine = !comment.hasContentToLeft && !comment.hasContentToRight;
|
||||
|
||||
if (commentConsumesWholeLine) {
|
||||
const node = Visitor.findNodeAtOrAfter(ast, comment.node.location.max - 1);
|
||||
|
||||
if (!node) {
|
||||
// No node after the comment found, it is probably at the end of the file.
|
||||
// So we attach it to the last command from the "bottom".
|
||||
const commands = ast.commands;
|
||||
const lastCommand = commands[commands.length - 1];
|
||||
if (lastCommand) {
|
||||
attachBottomComment(lastCommand, comment.node);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
attachTopComment(node, comment.node);
|
||||
return;
|
||||
}
|
||||
|
||||
if (comment.hasContentToRight && comment.node.subtype === 'multi-line') {
|
||||
const nodeToRight = Visitor.findNodeAtOrAfter(ast, comment.node.location.max - 1);
|
||||
|
||||
if (!nodeToRight) {
|
||||
const nodeToLeft = Visitor.findNodeAtOrBefore(ast, comment.node.location.min);
|
||||
|
||||
if (nodeToLeft) {
|
||||
attachRightComment(nodeToLeft, comment.node);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const isInsideNode = nodeToRight.location.min <= comment.node.location.min;
|
||||
|
||||
if (isInsideNode) {
|
||||
attachLeftComment(nodeToRight, comment.node);
|
||||
return;
|
||||
}
|
||||
|
||||
const visibleTokenBetweenCommentAndNodeToRight = findVisibleToken(
|
||||
tokens,
|
||||
comment.node.location.max,
|
||||
nodeToRight.location.min - 1
|
||||
);
|
||||
|
||||
if (visibleTokenBetweenCommentAndNodeToRight) {
|
||||
const nodeToLeft = Visitor.findNodeAtOrBefore(ast, comment.node.location.min);
|
||||
|
||||
if (nodeToLeft) {
|
||||
attachRightComment(nodeToLeft, comment.node);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
attachLeftComment(nodeToRight, comment.node);
|
||||
return;
|
||||
}
|
||||
|
||||
if (comment.hasContentToLeft) {
|
||||
const node = Visitor.findNodeAtOrBefore(ast, comment.node.location.min);
|
||||
|
||||
if (!node) return;
|
||||
|
||||
if (comment.node.subtype === 'multi-line') {
|
||||
attachRightComment(node, comment.node);
|
||||
} else if (comment.node.subtype === 'single-line') {
|
||||
attachRightEndComment(node, comment.node);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Walks through the AST and, for each decoration, attaches it to the
 * appropriate AST node, which is determined by the layout of the source text.
 *
 * @param ast AST to attach the decorations to.
 * @param tokens Lexer tokens of the parsed query.
 * @param lines Decoration lines collected by `collectDecorations`.
 */
export const attachDecorations = (
  ast: ESQLAstQueryExpression,
  tokens: Token[],
  lines: ParsedFormattingDecorationLines
) => {
  for (const line of lines) {
    for (const decoration of line) {
      switch (decoration.type) {
        case 'comment': {
          attachCommentDecoration(ast, tokens, decoration);
          break;
        }
      }
    }
  }
};
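End to end, the decorations collected and attached above surface on the optional `formatting` field of AST nodes when parsing with `withFormatting`. A sketch; the exact attachment points follow the rules implemented above and are stated here as expectations, not verified output:

```ts
import { parse } from './parser'; // also re-exported from the package root; path assumed

const { root } = parse(
  `// load the data
FROM index /* main source */
| LIMIT 10`,
  { withFormatting: true }
);

const from = root.commands[0];
// The whole-line comment should land as a "top" decoration of the FROM
// command, and the trailing multi-line comment as a "right" decoration of
// the `index` source node it follows.
console.log(from.formatting?.top?.[0]?.text); // ' load the data'
console.log(from.args[0]);                    // the source node; check its formatting.right
```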
@@ -7,6 +7,9 @@
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import type { Token } from 'antlr4';
import { DEFAULT_CHANNEL } from './constants';

export const isQuotedIdentifier = (text: string): boolean => {
  const firstChar = text[0];
  const lastChar = text[text.length - 1];

@@ -36,3 +39,140 @@ export const formatIdentifier = (text: string): string => {

export const formatIdentifierParts = (parts: string[]): string =>
  parts.map(formatIdentifier).join('.');

export const getPosition = (
  token: Pick<Token, 'start' | 'stop'> | null,
  lastToken?: Pick<Token, 'stop'> | undefined
) => {
  if (!token || token.start < 0) {
    return { min: 0, max: 0 };
  }
  const endFirstToken = token.stop > -1 ? Math.max(token.stop + 1, token.start) : undefined;
  const endLastToken = lastToken?.stop;
  return {
    min: token.start,
    max: endLastToken ?? endFirstToken ?? Infinity,
  };
};
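`getPosition` translates ANTLR's inclusive `start`/`stop` offsets into the `{ min, max }` ranges the AST uses. Small worked examples, derived directly from the implementation above:

```ts
import { getPosition } from './helpers';

// A single token covering characters 5..9 (ANTLR's `stop` is inclusive):
getPosition({ start: 5, stop: 9 });                // { min: 5, max: 10 }

// A range that stretches from one token to a later one:
getPosition({ start: 5, stop: 9 }, { stop: 20 });  // { min: 5, max: 20 }

// Synthetic or invalid tokens collapse to an empty range:
getPosition(null);                                 // { min: 0, max: 0 }
```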
|
||||
|
||||
/**
|
||||
* Finds all tokens in the given range using binary search. Allows to further
|
||||
* filter the tokens using a predicate.
|
||||
*
|
||||
* @param tokens List of ANTLR tokens.
|
||||
* @param min Text position to start searching from.
|
||||
* @param max Text position to stop searching at.
|
||||
* @param predicate Function to test each token.
|
||||
*/
|
||||
export const findTokens = function* (
|
||||
tokens: Token[],
|
||||
min: number = 0,
|
||||
max: number = tokens.length ? tokens[tokens.length - 1].stop : 0,
|
||||
predicate: (token: Token) => boolean = () => true
|
||||
): Iterable<Token> {
|
||||
let index = 0;
|
||||
let left = 0;
|
||||
let right = tokens.length - 1;
|
||||
|
||||
// Find the first token index.
|
||||
while (left <= right) {
|
||||
const mid = left + Math.floor((right - left) / 2);
|
||||
const token = tokens[mid];
|
||||
|
||||
if (token.start < min) {
|
||||
left = mid + 1;
|
||||
} else if (token.stop > min) {
|
||||
right = mid - 1;
|
||||
} else {
|
||||
index = mid;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Return all tokens in the range, which satisfy the predicate.
|
||||
for (; index < tokens.length; index++) {
|
||||
const token = tokens[index];
|
||||
|
||||
if (token.start > max) {
|
||||
break;
|
||||
}
|
||||
if (predicate(token)) {
|
||||
yield token;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Finds the first token in the given range using binary search. Allows to
|
||||
* further filter the tokens using a predicate.
|
||||
*
|
||||
* @param tokens List of ANTLR tokens.
|
||||
* @param min Text position to start searching from.
|
||||
* @param max Text position to stop searching at.
|
||||
* @param predicate Function to test each token.
|
||||
* @returns The first token that matches the predicate or `null` if no token is found.
|
||||
*/
|
||||
export const findFirstToken = (
|
||||
tokens: Token[],
|
||||
min: number = 0,
|
||||
max: number = tokens.length ? tokens[tokens.length - 1].stop : 0,
|
||||
predicate: (token: Token) => boolean = () => true
|
||||
): Token | null => {
|
||||
for (const token of findTokens(tokens, min, max, predicate)) {
|
||||
return token;
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
/**
 * Finds the first visible token in the given token range using binary search.
 *
 * @param tokens List of ANTLR tokens.
 * @param min Text position to start searching from.
 * @param max Text position to stop searching at.
 * @returns The first visible token or `null` if no token is found.
 */
export const findVisibleToken = (
  tokens: Token[],
  min: number = 0,
  max: number = tokens.length ? tokens[tokens.length - 1].stop : 0
): Token | null => {
  return findFirstToken(
    tokens,
    min,
    max,
    ({ channel, text }) => channel === DEFAULT_CHANNEL && text.length > 0
  );
};

/**
 * A heuristic set of punctuation characters.
 */
const punctuationChars = new Set(['.', ',', ';', ':', '(', ')', '[', ']', '{', '}']);

export const isLikelyPunctuation = (text: string): boolean =>
  text.length === 1 && punctuationChars.has(text);

/**
 * Finds the first punctuation token in the given token range using binary
 * search.
 *
 * @param tokens List of ANTLR tokens.
 * @param min Text position to start searching from.
 * @param max Text position to stop searching at.
 * @returns The first punctuation token or `null` if no token is found.
 */
export const findPunctuationToken = (
  tokens: Token[],
  min: number = 0,
  max: number = tokens.length ? tokens[tokens.length - 1].stop : 0
): Token | null => {
  return findFirstToken(
    tokens,
    min,
    max,
    ({ channel, text }) =>
      channel === DEFAULT_CHANNEL && text.length === 1 && punctuationChars.has(text)
  );
};
|
||||
|
|
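`findVisibleToken` and `findPunctuationToken` are what the comment-attachment logic uses to decide whether something like a comma sits between a comment and the following node. A usage sketch; the import paths and the expected `','` result are assumptions:

```ts
import { parse } from '.';                        // parser entry point, path assumed
import { findVisibleToken, findPunctuationToken } from './helpers';

const src = 'FROM a /* note */, b';
const { tokens } = parse(src);

// Is there a default-channel token between the end of the comment and the
// start of `b`? The comma is, so the comment stays attached to `a` as a
// "right" decoration instead of becoming a "left" decoration of `b`.
const from = src.indexOf('*/') + 2;
const to = src.indexOf(' b');
console.log(findVisibleToken(tokens, from, to)?.text);     // ','
console.log(findPunctuationToken(tokens, from, to)?.text); // ','
```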
packages/kbn-esql-ast/src/parser/index.ts (new file, 23 lines)
|
@@ -0,0 +1,23 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

export {
  getLexer,
  getParser,
  createParser,
  parse,
  parseErrors,
  type ParseOptions,
  type ParseResult,

  /** @deprecated Use `parse` instead. */
  parse as getAstAndSyntaxErrors,
} from './parser';

export { ESQLErrorListener } from './esql_error_listener';
|
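The old entry point keeps working through the deprecated alias, so existing imports compile unchanged while new code moves to `parse`. A migration sketch; the `@kbn/esql-ast` package entry name is an assumption, and the same exports are also available from this folder:

```ts
import { parse, getAstAndSyntaxErrors } from '@kbn/esql-ast'; // package entry assumed

// Before: the old helper name, now a deprecated alias of `parse`.
const legacy = getAstAndSyntaxErrors('FROM index | LIMIT 10');
legacy.ast; // list of commands

// After: same call, plus the new query root node.
const result = parse('FROM index | LIMIT 10');
result.root; // ESQLAstQueryExpression
result.ast;  // still the command list, kept for compatibility
```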
packages/kbn-esql-ast/src/parser/parser.ts (new file, 139 lines)
|
@ -0,0 +1,139 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { CharStreams, type Token } from 'antlr4';
|
||||
import { CommonTokenStream, type CharStream, type ErrorListener } from 'antlr4';
|
||||
import { ESQLErrorListener } from './esql_error_listener';
|
||||
import { ESQLAstBuilderListener } from './esql_ast_builder_listener';
|
||||
import { GRAMMAR_ROOT_RULE } from './constants';
|
||||
import { attachDecorations, collectDecorations } from './formatting';
|
||||
import type { ESQLAst, ESQLAstQueryExpression, EditorError } from '../types';
|
||||
import { Builder } from '../builder';
|
||||
import { default as ESQLLexer } from '../antlr/esql_lexer';
|
||||
import { default as ESQLParser } from '../antlr/esql_parser';
|
||||
import { default as ESQLParserListener } from '../antlr/esql_parser_listener';
|
||||
|
||||
export const getLexer = (inputStream: CharStream, errorListener: ErrorListener<any>) => {
|
||||
const lexer = new ESQLLexer(inputStream);
|
||||
|
||||
lexer.removeErrorListeners();
|
||||
lexer.addErrorListener(errorListener);
|
||||
|
||||
return lexer;
|
||||
};
|
||||
|
||||
export const getParser = (
|
||||
inputStream: CharStream,
|
||||
errorListener: ErrorListener<any>,
|
||||
parseListener?: ESQLParserListener
|
||||
) => {
|
||||
const lexer = getLexer(inputStream, errorListener);
|
||||
const tokens = new CommonTokenStream(lexer);
|
||||
const parser = new ESQLParser(tokens);
|
||||
|
||||
// lexer.symbolicNames
|
||||
|
||||
parser.removeErrorListeners();
|
||||
parser.addErrorListener(errorListener);
|
||||
|
||||
if (parseListener) {
|
||||
// @ts-expect-error the addParseListener API does exist and is documented here
|
||||
// https://github.com/antlr/antlr4/blob/dev/doc/listeners.md
|
||||
parser.addParseListener(parseListener);
|
||||
}
|
||||
|
||||
return {
|
||||
lexer,
|
||||
tokens,
|
||||
parser,
|
||||
};
|
||||
};
|
||||
|
||||
export const createParser = (text: string) => {
|
||||
const errorListener = new ESQLErrorListener();
|
||||
const parseListener = new ESQLAstBuilderListener();
|
||||
|
||||
return getParser(CharStreams.fromString(text), errorListener, parseListener);
|
||||
};
|
||||
|
||||
// These will need to be manually updated whenever the relevant grammar changes.
|
||||
const SYNTAX_ERRORS_TO_IGNORE = [
|
||||
`SyntaxError: mismatched input '<EOF>' expecting {'explain', 'from', 'meta', 'row', 'show'}`,
|
||||
];
|
||||
|
||||
export interface ParseOptions {
  /**
   * Whether to collect the user's custom formatting (comments and whitespace)
   * and attach it to AST nodes.
   */
  withFormatting?: boolean;
}

export interface ParseResult {
  /**
   * The root *QueryExpression* node of the parsed tree.
   */
  root: ESQLAstQueryExpression;

  /**
   * List of parsed commands.
   *
   * @deprecated Use `root` instead.
   */
  ast: ESQLAst;

  /**
   * List of ANTLR tokens generated by the lexer.
   */
  tokens: Token[];

  /**
   * List of parsing errors.
   */
  errors: EditorError[];
}

export const parse = (text: string | undefined, options: ParseOptions = {}): ParseResult => {
  if (text == null) {
    const commands: ESQLAstQueryExpression['commands'] = [];
    return { ast: commands, root: Builder.expression.query(commands), errors: [], tokens: [] };
  }
  const errorListener = new ESQLErrorListener();
  const parseListener = new ESQLAstBuilderListener();
  const { tokens, parser } = getParser(CharStreams.fromString(text), errorListener, parseListener);

  parser[GRAMMAR_ROOT_RULE]();

  const errors = errorListener.getErrors().filter((error) => {
    return !SYNTAX_ERRORS_TO_IGNORE.includes(error.message);
  });
  const { ast: commands } = parseListener.getAst();
  const root = Builder.expression.query(commands, {
    location: {
      min: 0,
      max: text.length - 1,
    },
  });

  if (options.withFormatting) {
    const decorations = collectDecorations(tokens);
    attachDecorations(root, tokens.tokens, decorations.lines);
  }

  return { root, ast: commands, errors, tokens: tokens.tokens };
};

export const parseErrors = (text: string) => {
  const errorListener = new ESQLErrorListener();
  const { parser } = getParser(CharStreams.fromString(text), errorListener);

  parser[GRAMMAR_ROOT_RULE]();

  return errorListener.getErrors();
};
|
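What a caller gets back from the two entry points above; the inline comments describe expected behaviour rather than verified output, and the relative import path is an assumption:

```ts
import { parse, parseErrors } from './parser'; // relative path assumed

const { root, ast, errors, tokens } = parse('FROM index /* source */ | STATS AVG(price)', {
  withFormatting: true,
});

root.commands.length; // 2: FROM and STATS
ast;                  // the same commands, kept under the deprecated name
errors;               // EditorError[], with the "empty query" false positive filtered out
tokens.some((t) => t.text.includes('/* source */')); // comment tokens are retained

// When only diagnostics are needed, skip AST building entirely:
const problems = parseErrors('FROM ');
```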
packages/kbn-esql-ast/src/parser/types.ts (new file, 63 lines)
|
@ -0,0 +1,63 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { ESQLAstComment } from '../types';
|
||||
|
||||
/**
|
||||
* Lines of decorations per *whitespace line*. A *whitespace line* is a line
|
||||
* which tracks line breaks only from the HIDDEN channel. It does not take into
|
||||
* account line breaks from the DEFAULT channel, i.e. content lines. For example,
|
||||
* it will ignore line breaks from triple-quoted strings, but will track line
|
||||
* breaks from comments and whitespace.
|
||||
*
|
||||
* Each list entry represents a line of decorations.
|
||||
*/
|
||||
export type ParsedFormattingDecorationLines = ParsedFormattingDecoration[][];
|
||||
|
||||
/**
|
||||
* A source text decoration that we are interested in.
|
||||
*
|
||||
* - Comments: we preserve user comments when pretty-printing.
|
||||
* - Line breaks: we allow users to specify one custom line break.
|
||||
*/
|
||||
export type ParsedFormattingDecoration =
|
||||
| ParsedFormattingCommentDecoration
|
||||
| ParsedFormattingLineBreakDecoration;
|
||||
|
||||
/**
 * A comment AST node with additional information about its position in the
 * source text.
 */
export interface ParsedFormattingCommentDecoration {
  type: 'comment';

  /**
   * Whether the comment has content on the same line to the left of it.
   */
  hasContentToLeft: boolean;

  /**
   * Whether the comment has content on the same line to the right of it.
   */
  hasContentToRight: boolean;

  /**
   * The comment AST node.
   */
  node: ESQLAstComment;
}

export interface ParsedFormattingLineBreakDecoration {
  type: 'line-break';

  /**
   * The number of line breaks in the source text.
   */
  lines: number;
}
|
|
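For illustration, the decoration `collectDecorations` would produce for the trailing comment in `FROM a // note` looks roughly like this; the offsets are counted by hand and shown only as an example:

```ts
import { Builder } from '../builder';
import type { ParsedFormattingCommentDecoration } from './types';

const decoration: ParsedFormattingCommentDecoration = {
  type: 'comment',
  hasContentToLeft: true,   // `FROM a` precedes the comment on the same line
  hasContentToRight: false, // nothing follows it on that line
  node: Builder.comment('single-line', ' note', { min: 7, max: 14 }),
};
```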
@ -63,7 +63,7 @@ import {
|
|||
InputParamContext,
|
||||
IndexPatternContext,
|
||||
InlinestatsCommandContext,
|
||||
} from './antlr/esql_parser';
|
||||
} from '../antlr/esql_parser';
|
||||
import {
|
||||
createSource,
|
||||
createColumn,
|
||||
|
@ -85,8 +85,9 @@ import {
|
|||
createInlineCast,
|
||||
createUnknownItem,
|
||||
createOrderExpression,
|
||||
} from './ast_helpers';
|
||||
import { getPosition } from './ast_position_utils';
|
||||
} from './factories';
|
||||
import { getPosition } from './helpers';
|
||||
|
||||
import {
|
||||
ESQLLiteral,
|
||||
ESQLColumn,
|
||||
|
@ -99,7 +100,7 @@ import {
|
|||
ESQLPositionalParamLiteral,
|
||||
ESQLNamedParamLiteral,
|
||||
ESQLOrderExpression,
|
||||
} from './types';
|
||||
} from '../types';
|
||||
|
||||
export function collectAllSourceIdentifiers(ctx: FromCommandContext): ESQLAstItem[] {
|
||||
const fromContexts = ctx.getTypedRuleContexts(IndexPatternContext);
|
||||
|
@ -490,10 +491,7 @@ export function visitPrimaryExpression(ctx: PrimaryExpressionContext): ESQLAstIt
|
|||
|
||||
function collectInlineCast(ctx: InlineCastContext): ESQLInlineCast {
|
||||
const primaryExpression = visitPrimaryExpression(ctx.primaryExpression());
|
||||
return {
|
||||
...createInlineCast(ctx),
|
||||
value: primaryExpression,
|
||||
};
|
||||
return createInlineCast(ctx, primaryExpression);
|
||||
}
|
||||
|
||||
export function collectLogicalExpression(ctx: BooleanExpressionContext) {
|
|
@ -0,0 +1,186 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { parse } from '../../parser';
|
||||
import { BasicPrettyPrinter } from '../basic_pretty_printer';
|
||||
|
||||
const reprint = (src: string) => {
|
||||
const { root } = parse(src, { withFormatting: true });
|
||||
const text = BasicPrettyPrinter.print(root);
|
||||
|
||||
// console.log(JSON.stringify(ast, null, 2));
|
||||
|
||||
return { text };
|
||||
};
|
||||
|
||||
const assertPrint = (src: string, expected: string = src) => {
|
||||
const { text } = reprint(src);
|
||||
|
||||
expect(text).toBe(expected);
|
||||
};
|
||||
|
||||
describe('source expression', () => {
|
||||
test('can print source left comment', () => {
|
||||
assertPrint('FROM /* cmt */ expr');
|
||||
});
|
||||
|
||||
test('can print source right comment', () => {
|
||||
assertPrint('FROM expr /* cmt */');
|
||||
});
|
||||
|
||||
test('can print source right comment with comma separating from next source', () => {
|
||||
assertPrint('FROM expr /* cmt */, expr2');
|
||||
});
|
||||
|
||||
test('can print source left and right comments', () => {
|
||||
assertPrint(
|
||||
'FROM /*a*/ /* b */ index1 /* c */, /* d */ index2 /* e */ /* f */, /* g */ index3'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('source column expression', () => {
|
||||
test('can print source left comment', () => {
|
||||
assertPrint('FROM a | STATS /* cmt */ col');
|
||||
});
|
||||
|
||||
test('can print column right comment', () => {
|
||||
assertPrint('FROM a | STATS col /* cmt */');
|
||||
});
|
||||
|
||||
test('can print column left and right comments', () => {
|
||||
assertPrint(
|
||||
'FROM a | STATS /*a*/ /* b */ col /* c */ /* d */, /* e */ col2 /* f */, col3 /* comment3 */, col4'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('literal expression', () => {
|
||||
test('can print source left comment', () => {
|
||||
assertPrint('FROM a | STATS /* cmt */ 1');
|
||||
});
|
||||
|
||||
test('can print column right comment', () => {
|
||||
assertPrint('FROM a | STATS "str" /* cmt */');
|
||||
});
|
||||
|
||||
test('can print column left and right comments', () => {
|
||||
assertPrint(
|
||||
'FROM a | STATS /*a*/ /* b */ TRUE /* c */ /* d */, /* e */ 1.1 /* f */, FALSE /* comment3 */, NULL'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('time interval expression', () => {
|
||||
test('can print source left comment', () => {
|
||||
assertPrint('FROM a | STATS /* cmt */ 1d');
|
||||
});
|
||||
|
||||
test('can print column right comment', () => {
|
||||
assertPrint('FROM a | STATS 2 years /* cmt */');
|
||||
});
|
||||
|
||||
test('can print column left and right comments', () => {
|
||||
assertPrint(
|
||||
'FROM a | STATS /*a*/ /* b */ 2 years /* c */ /* d */, /* e */ 3d /* f */, 1 week /* comment3 */, 1 weeks'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('inline cast expression', () => {
|
||||
test('can print source left comment', () => {
|
||||
assertPrint('FROM a | STATS /* 1 */ /* 2 */ 123::INTEGER /* 3 */');
|
||||
});
|
||||
});
|
||||
|
||||
describe('list literal expression', () => {
|
||||
test('can print source left comment', () => {
|
||||
assertPrint('FROM a | STATS /* 1 */ /* 2 */ [1, 2, 3] /* 3 */');
|
||||
});
|
||||
});
|
||||
|
||||
describe('function call expressions', () => {
|
||||
test('left of function call', () => {
|
||||
assertPrint('FROM a | STATS /* 1 */ FN()');
|
||||
});
|
||||
|
||||
test('right of function call', () => {
|
||||
assertPrint('FROM a | STATS FN() /* asdf */');
|
||||
});
|
||||
|
||||
test('various sides from function calls', () => {
|
||||
assertPrint('FROM a | STATS FN() /* asdf */, /*1*/ FN2() /*2*/, FN3() /*3*/');
|
||||
});
|
||||
|
||||
test('left of function call, when function as an argument', () => {
|
||||
assertPrint('FROM a | STATS /* 1 */ FN(1)');
|
||||
});
|
||||
|
||||
test('right comments respect function bracket', () => {
|
||||
assertPrint('FROM a | STATS FN(1 /* 1 */) /* 2 */');
|
||||
});
|
||||
|
||||
test('around function argument', () => {
|
||||
assertPrint('FROM a | STATS /*1*/ FN(/*2*/ 1 /*3*/) /*4*/');
|
||||
});
|
||||
|
||||
test('around function arguments', () => {
|
||||
assertPrint('FROM a | STATS /*1*/ FN(/*2*/ 1 /*3*/, /*4*/ /*5*/ 2 /*6*/ /*7*/) /*8*/');
|
||||
});
|
||||
});
|
||||
|
||||
describe('binary expressions', () => {
|
||||
test('around binary expression operands', () => {
|
||||
assertPrint('FROM a | STATS /* a */ 1 /* b */ + /* c */ 2 /* d */');
|
||||
});
|
||||
|
||||
test('around binary expression operands, twice', () => {
|
||||
assertPrint('FROM a | STATS /* a */ 1 /* b */ + /* c */ 2 /* d */ + /* e */ 3 /* f */');
|
||||
});
|
||||
|
||||
test('around binary expression operands, trice', () => {
|
||||
assertPrint(
|
||||
'FROM a | STATS /* a */ /* a.2 */ 1 /* b */ + /* c */ 2 /* d */ + /* e */ 3 /* f */ + /* g */ 4 /* h */ /* h.2 */'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('unary expressions', () => {
|
||||
test('around binary expression operands', () => {
|
||||
assertPrint('FROM a | STATS /* a */ NOT /* b */ 1 /* c */');
|
||||
});
|
||||
|
||||
test('around binary expression operands, with trailing argument', () => {
|
||||
assertPrint('FROM a | STATS /* a */ NOT /* b */ 1 /* c */, 2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('post-fix unary expressions', () => {
|
||||
test('around binary expression operands', () => {
|
||||
assertPrint('FROM a | STATS /*I*/ 0 /*II*/ IS NULL /*III*/');
|
||||
});
|
||||
|
||||
test('around binary expression operands, with surrounding args', () => {
|
||||
assertPrint('FROM a | STATS FN(1, /*I*/ 0 /*II*/ IS NULL /*III*/, 2)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('rename expressions', () => {
|
||||
test('around the rename expression', () => {
|
||||
assertPrint('FROM a | RENAME /*I*/ a AS b /*II*/');
|
||||
});
|
||||
|
||||
test('around two rename expressions', () => {
|
||||
assertPrint('FROM a | RENAME /*I*/ a AS b /*II*/, /*III*/ c AS d /*IV*/');
|
||||
});
|
||||
|
||||
test('inside rename expression', () => {
|
||||
assertPrint('FROM a | RENAME /*I*/ a /*II*/ AS /*III*/ b /*IV*/, c AS d');
|
||||
});
|
||||
});
|
|
@ -7,14 +7,14 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors } from '../../ast_parser';
|
||||
import { parse } from '../../parser';
|
||||
import { ESQLFunction } from '../../types';
|
||||
import { Walker } from '../../walker';
|
||||
import { BasicPrettyPrinter, BasicPrettyPrinterMultilineOptions } from '../basic_pretty_printer';
|
||||
|
||||
const reprint = (src: string) => {
|
||||
const { ast } = getAstAndSyntaxErrors(src);
|
||||
const text = BasicPrettyPrinter.print(ast);
|
||||
const { root } = parse(src);
|
||||
const text = BasicPrettyPrinter.print(root);
|
||||
|
||||
// console.log(JSON.stringify(ast, null, 2));
|
||||
|
||||
|
@ -372,17 +372,17 @@ describe('single line query', () => {
|
|||
|
||||
describe('cast expressions', () => {
|
||||
test('various', () => {
|
||||
expect(reprint('ROW a::string').text).toBe('ROW a::string');
|
||||
expect(reprint('ROW 123::string').text).toBe('ROW 123::string');
|
||||
expect(reprint('ROW "asdf"::number').text).toBe('ROW "asdf"::number');
|
||||
expect(reprint('ROW a::string').text).toBe('ROW a::STRING');
|
||||
expect(reprint('ROW 123::string').text).toBe('ROW 123::STRING');
|
||||
expect(reprint('ROW "asdf"::number').text).toBe('ROW "asdf"::NUMBER');
|
||||
});
|
||||
|
||||
test('wraps into rackets complex cast expressions', () => {
|
||||
expect(reprint('ROW (1 + 2)::string').text).toBe('ROW (1 + 2)::string');
|
||||
expect(reprint('ROW (1 + 2)::string').text).toBe('ROW (1 + 2)::STRING');
|
||||
});
|
||||
|
||||
test('does not wrap function call', () => {
|
||||
expect(reprint('ROW fn()::string').text).toBe('ROW FN()::string');
|
||||
expect(reprint('ROW fn()::string').text).toBe('ROW FN()::STRING');
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -404,8 +404,8 @@ describe('single line query', () => {
|
|||
|
||||
describe('multiline query', () => {
|
||||
const multiline = (src: string, opts?: BasicPrettyPrinterMultilineOptions) => {
|
||||
const { ast } = getAstAndSyntaxErrors(src);
|
||||
const text = BasicPrettyPrinter.multiline(ast, opts);
|
||||
const { root } = parse(src);
|
||||
const text = BasicPrettyPrinter.multiline(root, opts);
|
||||
|
||||
// console.log(JSON.stringify(ast, null, 2));
|
||||
|
||||
|
@ -478,7 +478,9 @@ describe('single line command', () => {
|
|||
| EVAL avg_salary = ROUND(avg_salary)
|
||||
| SORT hired, languages
|
||||
| LIMIT 100`;
|
||||
const { ast: commands } = getAstAndSyntaxErrors(query);
|
||||
const {
|
||||
root: { commands },
|
||||
} = parse(query);
|
||||
const line1 = BasicPrettyPrinter.command(commands[0]);
|
||||
const line2 = BasicPrettyPrinter.command(commands[1]);
|
||||
const line3 = BasicPrettyPrinter.command(commands[2]);
|
||||
|
@ -496,9 +498,9 @@ describe('single line command', () => {
|
|||
describe('single line expression', () => {
|
||||
test('can print a single expression', () => {
|
||||
const query = `FROM a | STATS a != 1, avg(1, 2, 3)`;
|
||||
const { ast } = getAstAndSyntaxErrors(query);
|
||||
const comparison = Walker.match(ast, { type: 'function', name: '!=' })! as ESQLFunction;
|
||||
const func = Walker.match(ast, { type: 'function', name: 'avg' })! as ESQLFunction;
|
||||
const { root } = parse(query);
|
||||
const comparison = Walker.match(root, { type: 'function', name: '!=' })! as ESQLFunction;
|
||||
const func = Walker.match(root, { type: 'function', name: 'avg' })! as ESQLFunction;
|
||||
|
||||
const text1 = BasicPrettyPrinter.expression(comparison);
|
||||
const text2 = BasicPrettyPrinter.expression(func);
|
||||
|
@ -507,3 +509,5 @@ describe('single line expression', () => {
|
|||
expect(text2).toBe('AVG(1, 2, 3)');
|
||||
});
|
||||
});
|
||||
|
||||
it.todo('test for NOT unary expression');
|
||||
|
|
|
@ -0,0 +1,545 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { parse } from '../../parser';
|
||||
import { WrappingPrettyPrinter, WrappingPrettyPrinterOptions } from '../wrapping_pretty_printer';
|
||||
|
||||
const reprint = (src: string, opts?: WrappingPrettyPrinterOptions) => {
|
||||
const { root } = parse(src, { withFormatting: true });
|
||||
const text = WrappingPrettyPrinter.print(root, opts);
|
||||
|
||||
return { text };
|
||||
};
|
||||
|
||||
describe('commands', () => {
|
||||
describe('top comments', () => {
|
||||
test('preserves single command top comment', () => {
|
||||
const query = `
|
||||
//comment
|
||||
FROM index
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
//comment
|
||||
FROM index`);
|
||||
});
|
||||
|
||||
test('over second command', () => {
|
||||
const query = `
|
||||
FROM index |
|
||||
//comment
|
||||
LIMIT 123
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index
|
||||
//comment
|
||||
| LIMIT 123`);
|
||||
});
|
||||
|
||||
test('over the last command', () => {
|
||||
const query = `
|
||||
FROM index | SORT abc |
|
||||
//comment
|
||||
LIMIT 123
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index
|
||||
| SORT abc
|
||||
//comment
|
||||
| LIMIT 123`);
|
||||
});
|
||||
|
||||
test('multiple comments over multiple commands', () => {
|
||||
const query = `
|
||||
// 1
|
||||
// 2
|
||||
/* 3 */
|
||||
FROM index
|
||||
/* 1
|
||||
2
|
||||
3 */
|
||||
// sort
|
||||
/* sort 2 */
|
||||
| SORT abc
|
||||
|
|
||||
//comment
|
||||
/* limit */
|
||||
// LIMIT
|
||||
LIMIT 123
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
// 1
|
||||
// 2
|
||||
/* 3 */
|
||||
FROM index
|
||||
/* 1
|
||||
2
|
||||
3 */
|
||||
// sort
|
||||
/* sort 2 */
|
||||
| SORT abc
|
||||
//comment
|
||||
/* limit */
|
||||
// LIMIT
|
||||
| LIMIT 123`);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('expressions', () => {
|
||||
describe('source expression', () => {
|
||||
describe('top comments', () => {
|
||||
test('single line comment', () => {
|
||||
const query = `
|
||||
FROM
|
||||
|
||||
// the comment
|
||||
index
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM
|
||||
// the comment
|
||||
index`);
|
||||
});
|
||||
|
||||
test('multi line comment', () => {
|
||||
const query = `
|
||||
FROM
|
||||
|
||||
/* the comment */
|
||||
index
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM
|
||||
/* the comment */
|
||||
index`);
|
||||
});
|
||||
|
||||
test('multiple comments', () => {
|
||||
const query = `
|
||||
FROM
|
||||
|
||||
// 1
|
||||
/* 2 */
|
||||
// 3
|
||||
/* 4 */
|
||||
index
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM
|
||||
// 1
|
||||
/* 2 */
|
||||
// 3
|
||||
/* 4 */
|
||||
index`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('left comments', () => {
|
||||
test('single left comment', () => {
|
||||
const query = `
|
||||
FROM /*1*/ index
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM /*1*/ index`);
|
||||
});
|
||||
|
||||
test('multiple left comments', () => {
|
||||
const query = `
|
||||
FROM /*1*/ /*2*/ /*3*/ index
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM /*1*/ /*2*/ /*3*/ index`);
|
||||
});
|
||||
|
||||
test('multiple left comments, and multiple arguments', () => {
|
||||
const query = `
|
||||
FROM index1, /*1*/ /*2*/ /*3*/ index2, index3
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index1, /*1*/ /*2*/ /*3*/ index2, index3`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('right comments', () => {
|
||||
test('single multi-line right comment', () => {
|
||||
const query = `
|
||||
FROM index /*1*/
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index /*1*/`);
|
||||
});
|
||||
|
||||
test('multiple multi-line right comments', () => {
|
||||
const query = `
|
||||
FROM index /*1*/ /*2*/ /*3*/
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index /*1*/ /*2*/ /*3*/`);
|
||||
});
|
||||
|
||||
test('multiple multi-line right comment and multiple arguments', () => {
|
||||
const query = `
|
||||
FROM index1, index2 /*1*/ /*2*/ /*3*/, index3
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index1, index2 /*1*/ /*2*/ /*3*/, index3`);
|
||||
});
|
||||
|
||||
test('a single-line comment', () => {
|
||||
const query = `
|
||||
FROM index1 // 1
|
||||
, index2
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM
|
||||
index1, // 1
|
||||
index2`);
|
||||
});
|
||||
});
|
||||
|
||||
test('surrounding source from three sides', () => {
|
||||
const query = `
|
||||
FROM index0,
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ index1, /* 7 */ /* 8 */ // 9
|
||||
index2
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM
|
||||
index0,
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ index1, /* 7 */ /* 8 */ // 9
|
||||
index2`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('column expression', () => {
|
||||
test('surrounded from three sides', () => {
|
||||
const query = `
|
||||
FROM index | KEEP
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ field /* 7 */ /* 8 */ // 9`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index
|
||||
| KEEP
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ field /* 7 */ /* 8 */ // 9`);
|
||||
});
|
||||
|
||||
test('nested in function', () => {
|
||||
const query = `
|
||||
FROM index | STATS fn(
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ field /* 7 */ /* 8 */ // 9
|
||||
)`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index
|
||||
| STATS
|
||||
FN(
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ field /* 7 */ /* 8 */ // 9
|
||||
)`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('literal expressions', () => {
|
||||
test('numeric literal, surrounded from three sides', () => {
|
||||
const query = `
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ 123 /* 7 */ /* 8 */ // 9`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ 123 /* 7 */ /* 8 */ // 9`);
|
||||
});
|
||||
|
||||
test('string literal, surrounded from three sides', () => {
|
||||
const query = `
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ "asdf" /* 7 */ /* 8 */ // 9`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ "asdf" /* 7 */ /* 8 */ // 9`);
|
||||
});
|
||||
|
||||
// Enable this test once triple quoted strings are properly supported
|
||||
test.skip('triple quoted string literal, surrounded from three sides', () => {
|
||||
const query = `
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ """a
|
||||
b""" /* 7 */ /* 8 */ // 9`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ """a\nb""" /* 7 */ /* 8 */ // 9`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('time interval literal expressions', () => {
|
||||
test('numeric literal, surrounded from three sides', () => {
|
||||
const query = `
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ 1 day /* 7 */ /* 8 */ // 9`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ 1 day /* 7 */ /* 8 */ // 9`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('inline cast expressions', () => {
|
||||
test('numeric literal, surrounded from three sides', () => {
|
||||
const query = `
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ 1::INTEGER /* 7 */ /* 8 */ // 9`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ 1::INTEGER /* 7 */ /* 8 */ // 9`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('list literal expressions', () => {
|
||||
test('numeric list literal, surrounded from three sides', () => {
|
||||
const query = `
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ [1, 2, 3] /* 7 */ /* 8 */ // 9`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ [1, 2, 3] /* 7 */ /* 8 */ // 9`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('rename expressions', () => {
|
||||
test('rename expression, surrounded from three sides', () => {
|
||||
const query = `
|
||||
ROW 1 | RENAME
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ a AS b /* 7 */ /* 8 */ // 9`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW 1
|
||||
| RENAME
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ a AS
|
||||
b /* 7 */ /* 8 */ // 9`);
|
||||
});
|
||||
|
||||
test('rename expression, surrounded from three sides with comments, and between other expressions', () => {
|
||||
const query = `
|
||||
ROW 1 | RENAME
|
||||
x AS y,
|
||||
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ a AS b /* 7 */ /* 8 */, // 9
|
||||
|
||||
x AS y
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW 1
|
||||
| RENAME
|
||||
x AS y,
|
||||
/* 1 */
|
||||
// 2
|
||||
/* 3 */
|
||||
// 4
|
||||
/* 5 */ /* 6 */ a AS
|
||||
b, /* 7 */ /* 8 */ // 9
|
||||
x AS y`);
|
||||
});
|
||||
|
||||
test('rename operands surrounds from all sides', () => {
|
||||
const query = `
|
||||
ROW 1 | RENAME
|
||||
x AS y,
|
||||
/* 1 */
|
||||
/* 2 */ a /* 3 */ AS
|
||||
|
||||
/* 4 */
|
||||
/* 5 */ b, /* 6 */
|
||||
x AS y`;
|
||||
const text = reprint(query).text;
|
||||
expect('\n' + text).toBe(`
|
||||
ROW 1
|
||||
| RENAME
|
||||
x AS y,
|
||||
/* 1 */
|
||||
/* 2 */ a /* 3 */ AS
|
||||
/* 4 */
|
||||
/* 5 */ b, /* 6 */
|
||||
x AS y`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('function call expressions', () => {
|
||||
describe('binary expressions', () => {
|
||||
test('first operand surrounded by inline comments', () => {
|
||||
const query = `ROW /* 1 */ /* 2 */ 1 /* 3 */ /* 4 */ + 2`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect(text).toBe(`ROW /* 1 */ /* 2 */ 1 /* 3 */ /* 4 */ + 2`);
|
||||
});
|
||||
|
||||
test('second operand surrounded by inline comments', () => {
|
||||
const query = `ROW 1 * /* 1 */ /* 2 */ 2 /* 3 */ /* 4 */`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect(text).toBe(`ROW 1 * /* 1 */ /* 2 */ 2 /* 3 */ /* 4 */`);
|
||||
});
|
||||
|
||||
test('first operand with top comment', () => {
|
||||
const query = `ROW
|
||||
// One is important here
|
||||
1 +
|
||||
2`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW
|
||||
// One is important here
|
||||
1 +
|
||||
2`);
|
||||
});
|
||||
|
||||
test('second operand with top comment', () => {
|
||||
const query = `ROW
|
||||
1 +
|
||||
// Two is more important here
|
||||
2`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
ROW
|
||||
1 +
|
||||
// Two is more important here
|
||||
2`);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -7,12 +7,12 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors } from '../../ast_parser';
|
||||
import { parse } from '../../parser';
|
||||
import { WrappingPrettyPrinter, WrappingPrettyPrinterOptions } from '../wrapping_pretty_printer';
|
||||
|
||||
const reprint = (src: string, opts?: WrappingPrettyPrinterOptions) => {
|
||||
const { ast } = getAstAndSyntaxErrors(src);
|
||||
const text = WrappingPrettyPrinter.print(ast, opts);
|
||||
const { root } = parse(src);
|
||||
const text = WrappingPrettyPrinter.print(root, opts);
|
||||
|
||||
// console.log(JSON.stringify(ast, null, 2));
|
||||
|
||||
|
@ -216,6 +216,20 @@ FROM index1, index2, index2, index3, index4, index5, index6
|
|||
METADATA _id, _source`);
|
||||
});
|
||||
|
||||
test("indents options such that they don't align with sub-commands", () => {
|
||||
const query = `
|
||||
FROM index1, index2, index2, index3, index4, index5, index6 METADATA _id, _source
|
||||
| WHERE language == "javascript"
|
||||
| LIMIT 123`;
|
||||
const text = reprint(query, { pipeTab: ' ' }).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index1, index2, index2, index3, index4, index5, index6
|
||||
METADATA _id, _source
|
||||
| WHERE language == "javascript"
|
||||
| LIMIT 123`);
|
||||
});
|
||||
|
||||
test('indents METADATA option differently than the LIMIT pipe', () => {
|
||||
const query = `
|
||||
FROM index1, index2, index2, index3, index4, index5, index6 METADATA _id, _source | LIMIT 10`;
|
||||
|
@ -346,6 +360,19 @@ FROM index
|
|||
| LIMIT 10`);
|
||||
});
|
||||
|
||||
test('single long function argument is broken by line', () => {
|
||||
const query = `
|
||||
FROM index | STATS super_function("xxxx-xxxx-xxxxxxxxxxxx-xxxx-xxxxxxxx-xxxx-xxxx-xxxxxxxxxxxx-xxxx-xxxxxxxx")
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
||||
expect('\n' + text).toBe(`
|
||||
FROM index
|
||||
| STATS
|
||||
SUPER_FUNCTION(
|
||||
"xxxx-xxxx-xxxxxxxxxxxx-xxxx-xxxxxxxx-xxxx-xxxx-xxxxxxxxxxxx-xxxx-xxxxxxxx")`);
|
||||
});
|
||||
|
||||
test('break by line function arguments, when wrapping is not enough', () => {
|
||||
const query = `
|
||||
FROM index
|
||||
|
@ -474,7 +501,7 @@ FROM index
|
|||
test('binary expressions of different precedence are not flattened', () => {
|
||||
const query = `
|
||||
FROM index
|
||||
| STATS super_function_name(0.123123123123123 + 888811112.232323123123 * 123123123123.123123123 + 23232323.23232323123 - 123 + 999)),
|
||||
| STATS fn(123456789 + 123456789 - 123456789 + 123456789 - 123456789 + 123456789 - 123456789)),
|
||||
| LIMIT 10
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
@ -482,12 +509,14 @@ FROM index
|
|||
expect('\n' + text).toBe(`
|
||||
FROM index
|
||||
| STATS
|
||||
SUPER_FUNCTION_NAME(
|
||||
0.123123123123123 +
|
||||
888811112.2323232 * 123123123123.12312 +
|
||||
23232323.232323233 -
|
||||
123 +
|
||||
999)`);
|
||||
FN(
|
||||
123456789 +
|
||||
123456789 -
|
||||
123456789 +
|
||||
123456789 -
|
||||
123456789 +
|
||||
123456789 -
|
||||
123456789)`);
|
||||
});
|
||||
|
||||
test('binary expressions vertical flattening child function function argument wrapping', () => {
|
||||
|
@ -513,7 +542,7 @@ FROM index
|
|||
test('two binary expression lists of different precedence group', () => {
|
||||
const query = `
|
||||
FROM index
|
||||
| STATS super_function_name(11111111111111.111 + 3333333333333.3333 * 3333333333333.3333 * 3333333333333.3333 * 3333333333333.3333 + 11111111111111.111 + 11111111111111.111)),
|
||||
| STATS fn(11111111111111.111 + 3333333333333.3333 * 3333333333333.3333 * 3333333333333.3333 * 3333333333333.3333 + 11111111111111.111 + 11111111111111.111)),
|
||||
| LIMIT 10
|
||||
`;
|
||||
const text = reprint(query).text;
|
||||
|
@ -521,7 +550,7 @@ FROM index
|
|||
expect('\n' + text).toBe(`
|
||||
FROM index
|
||||
| STATS
|
||||
SUPER_FUNCTION_NAME(
|
||||
FN(
|
||||
11111111111111.111 +
|
||||
3333333333333.3335 *
|
||||
3333333333333.3335 *
|
||||
|
@ -565,3 +594,5 @@ ROW (asdf + asdf)::string, 1.2::string, "1234"::integer, (12321342134 + 23412341
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
test.todo('Idempotence on multiple times pretty printing');
|
||||
|
|
|
@ -8,17 +8,10 @@
|
|||
*/
|
||||
|
||||
import { binaryExpressionGroup } from '../ast/helpers';
|
||||
import { ESQLAstCommand } from '../types';
|
||||
import { ESQLAstExpressionNode, ESQLAstQueryNode, Visitor } from '../visitor';
|
||||
import { ESQLAstBaseItem, ESQLAstCommand, ESQLAstQueryExpression } from '../types';
|
||||
import { ESQLAstExpressionNode, Visitor } from '../visitor';
|
||||
import { LeafPrinter } from './leaf_printer';
|
||||
|
||||
/**
|
||||
* @todo
|
||||
*
|
||||
* 1. Add support for binary expression wrapping into brackets, due to operator
|
||||
* precedence.
|
||||
*/
|
||||
|
||||
export interface BasicPrettyPrinterOptions {
|
||||
/**
|
||||
* Whether to break the query into multiple lines on each pipe. Defaults to
|
||||
|
@ -66,7 +59,7 @@ export class BasicPrettyPrinter {
|
|||
* @returns A single-line string representation of the query.
|
||||
*/
|
||||
public static readonly print = (
|
||||
query: ESQLAstQueryNode,
|
||||
query: ESQLAstQueryExpression,
|
||||
opts?: BasicPrettyPrinterOptions
|
||||
): string => {
|
||||
const printer = new BasicPrettyPrinter(opts);
|
||||
|
@ -82,7 +75,7 @@ export class BasicPrettyPrinter {
|
|||
* @returns A multi-line string representation of the query.
|
||||
*/
|
||||
public static readonly multiline = (
|
||||
query: ESQLAstQueryNode,
|
||||
query: ESQLAstQueryExpression,
|
||||
opts?: BasicPrettyPrinterMultilineOptions
|
||||
): string => {
|
||||
const printer = new BasicPrettyPrinter({ ...opts, multiline: true });
|
||||
|
@ -133,15 +126,56 @@ export class BasicPrettyPrinter {
|
|||
: word.toUpperCase();
|
||||
}
|
||||
|
||||
protected decorateWithComments(node: ESQLAstBaseItem, formatted: string): string {
|
||||
const formatting = node.formatting;
|
||||
|
||||
if (!formatting) {
|
||||
return formatted;
|
||||
}
|
||||
|
||||
if (formatting.left) {
|
||||
const comments = LeafPrinter.commentList(formatting.left);
|
||||
|
||||
if (comments) {
|
||||
formatted = `${comments} ${formatted}`;
|
||||
}
|
||||
}
|
||||
|
||||
if (formatting.right) {
|
||||
const comments = LeafPrinter.commentList(formatting.right);
|
||||
|
||||
if (comments) {
|
||||
formatted = `${formatted} ${comments}`;
|
||||
}
|
||||
}
|
||||
|
||||
return formatted;
|
||||
}
|
||||
|
||||
protected readonly visitor: Visitor<any> = new Visitor()
|
||||
.on('visitExpression', (ctx) => {
|
||||
return '<EXPRESSION>';
|
||||
})
|
||||
|
||||
.on('visitSourceExpression', (ctx) => LeafPrinter.source(ctx.node))
|
||||
.on('visitColumnExpression', (ctx) => LeafPrinter.column(ctx.node))
|
||||
.on('visitLiteralExpression', (ctx) => LeafPrinter.literal(ctx.node))
|
||||
.on('visitTimeIntervalLiteralExpression', (ctx) => LeafPrinter.timeInterval(ctx.node))
|
||||
.on('visitSourceExpression', (ctx) => {
|
||||
const formatted = LeafPrinter.source(ctx.node);
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
})
|
||||
|
||||
.on('visitColumnExpression', (ctx) => {
|
||||
const formatted = LeafPrinter.column(ctx.node);
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
})
|
||||
|
||||
.on('visitLiteralExpression', (ctx) => {
|
||||
const formatted = LeafPrinter.literal(ctx.node);
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
})
|
||||
|
||||
.on('visitTimeIntervalLiteralExpression', (ctx) => {
|
||||
const formatted = LeafPrinter.timeInterval(ctx.node);
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
})
|
||||
|
||||
.on('visitInlineCastExpression', (ctx) => {
|
||||
const value = ctx.value();
|
||||
|
@ -156,7 +190,10 @@ export class BasicPrettyPrinter {
|
|||
valueFormatted = `(${valueFormatted})`;
|
||||
}
|
||||
|
||||
return `${valueFormatted}::${ctx.node.castType}`;
|
||||
const typeName = this.keyword(ctx.node.castType);
|
||||
const formatted = `${valueFormatted}::${typeName}`;
|
||||
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
})
|
||||
|
||||
.on('visitListLiteralExpression', (ctx) => {
|
||||
|
@ -166,7 +203,9 @@ export class BasicPrettyPrinter {
|
|||
elements += (elements ? ', ' : '') + arg;
|
||||
}
|
||||
|
||||
return `[${elements}]`;
|
||||
const formatted = `[${elements}]`;
|
||||
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
})
|
||||
|
||||
.on('visitFunctionCallExpression', (ctx) => {
|
||||
|
@ -179,12 +218,16 @@ export class BasicPrettyPrinter {
|
|||
case 'unary-expression': {
|
||||
operator = this.keyword(operator);
|
||||
|
||||
return `${operator} ${ctx.visitArgument(0, undefined)}`;
|
||||
const formatted = `${operator} ${ctx.visitArgument(0, undefined)}`;
|
||||
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
}
|
||||
case 'postfix-unary-expression': {
|
||||
operator = this.keyword(operator);
|
||||
|
||||
return `${ctx.visitArgument(0)} ${operator}`;
|
||||
const formatted = `${ctx.visitArgument(0)} ${operator}`;
|
||||
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
}
|
||||
case 'binary-expression': {
|
||||
operator = this.keyword(operator);
|
||||
|
@ -207,7 +250,7 @@ export class BasicPrettyPrinter {
|
|||
|
||||
const formatted = `${leftFormatted} ${operator} ${rightFormatted}`;
|
||||
|
||||
return formatted;
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
}
|
||||
default: {
|
||||
if (opts.lowercaseFunctions) {
|
||||
|
@ -220,13 +263,17 @@ export class BasicPrettyPrinter {
|
|||
args += (args ? ', ' : '') + arg;
|
||||
}
|
||||
|
||||
return `${operator}(${args})`;
|
||||
const formatted = `${operator}(${args})`;
|
||||
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
.on('visitRenameExpression', (ctx) => {
|
||||
return `${ctx.visitArgument(0)} ${this.keyword('AS')} ${ctx.visitArgument(1)}`;
|
||||
const formatted = `${ctx.visitArgument(0)} ${this.keyword('AS')} ${ctx.visitArgument(1)}`;
|
||||
|
||||
return this.decorateWithComments(ctx.node, formatted);
|
||||
})
|
||||
|
||||
.on('visitOrderExpression', (ctx) => {
|
||||
|
@ -295,7 +342,7 @@ export class BasicPrettyPrinter {
|
|||
return text;
|
||||
});
|
||||
|
||||
public print(query: ESQLAstQueryNode) {
|
||||
public print(query: ESQLAstQueryExpression) {
|
||||
return this.visitor.visitQuery(query, undefined);
|
||||
}
|
||||
|
||||
|
|
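A minimal usage sketch of the single-line printer with comment decorations; the import paths and the `withComments` parse option are assumptions, and the output shown is only indicative:

```ts
import { parse } from '../../parser';
import { BasicPrettyPrinter } from '../pretty_print';

// Parse with comment collection enabled (option name assumed).
const { root } = parse('FROM index /* main source */ | LIMIT 10', {
  withComments: true,
});

// The basic printer emits a single line, so only left and right multi-line
// comments are kept inline.
const text = BasicPrettyPrinter.print(root);
console.log(text); // e.g. "FROM index /* main source */ | LIMIT 10"
```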
79
packages/kbn-esql-ast/src/pretty_print/helpers.ts
Normal file
|
@ -0,0 +1,79 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { ESQLAstBaseItem, ESQLProperNode } from '../types';
|
||||
import { Walker } from '../walker';
|
||||
|
||||
export interface QueryPrettyPrintStats {
|
||||
/**
|
||||
* `true` if the given AST has a line breaking decoration. A line breaking
|
||||
* decoration is any decoration that requires a newline "\n" to be printed.
|
||||
*/
|
||||
hasLineBreakingDecorations: boolean;
|
||||
|
||||
/**
|
||||
* Whether the given AST has at least one single line comment to the right of
|
||||
* some node.
|
||||
*/
|
||||
hasRightSingleLineComments: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Walks the given AST sub-tree once and computes the pretty-print stats.
|
||||
*
|
||||
* @param ast The AST sub-tree to compute the stats for.
|
||||
*/
|
||||
export const getPrettyPrintStats = (ast: ESQLProperNode): QueryPrettyPrintStats => {
|
||||
const stats: QueryPrettyPrintStats = {
|
||||
hasLineBreakingDecorations: false,
|
||||
hasRightSingleLineComments: false,
|
||||
};
|
||||
|
||||
Walker.walk(ast, {
|
||||
visitAny: (node) => {
|
||||
if (hasLineBreakingDecoration(node)) {
|
||||
stats.hasLineBreakingDecorations = true;
|
||||
}
|
||||
if (!!node.formatting?.rightSingleLine) {
|
||||
stats.hasRightSingleLineComments = true;
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return stats;
|
||||
};
|
||||
|
||||
export const hasLineBreakingDecoration = (node: ESQLAstBaseItem): boolean => {
|
||||
const formatting = node.formatting;
|
||||
|
||||
if (!formatting) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (
|
||||
(!!formatting.top && formatting.top.length > 0) ||
|
||||
(!!formatting.bottom && formatting.bottom.length > 0) ||
|
||||
!!formatting.rightSingleLine
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (const decoration of [...(formatting.left ?? []), ...(formatting.right ?? [])]) {
|
||||
if (
|
||||
decoration.type === 'comment' &&
|
||||
decoration.subtype === 'multi-line' &&
|
||||
!decoration.text.includes('\n')
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
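A short sketch of how these stats could be consumed; the query text, import paths, and the `withComments` option are assumptions:

```ts
import { parse } from '../parser';
import { getPrettyPrintStats } from './helpers';

const { root } = parse(
  `FROM index
// single-line comments are always followed by a line break
| LIMIT 10`,
  { withComments: true }
);

const stats = getPrettyPrintStats(root);

if (stats.hasLineBreakingDecorations) {
  // The printer cannot keep the query on one line; fall back to the
  // multi-line (wrapping) layout.
}
```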
21
packages/kbn-esql-ast/src/pretty_print/index.ts
Normal file
|
@ -0,0 +1,21 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
export { LeafPrinter } from './leaf_printer';
|
||||
|
||||
export {
|
||||
BasicPrettyPrinter,
|
||||
type BasicPrettyPrinterOptions,
|
||||
type BasicPrettyPrinterMultilineOptions,
|
||||
} from './basic_pretty_printer';
|
||||
|
||||
export {
|
||||
WrappingPrettyPrinter,
|
||||
type WrappingPrettyPrinterOptions,
|
||||
} from './wrapping_pretty_printer';
|
|
@ -7,7 +7,14 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { ESQLColumn, ESQLLiteral, ESQLSource, ESQLTimeInterval } from '../types';
|
||||
import {
|
||||
ESQLAstComment,
|
||||
ESQLAstCommentMultiLine,
|
||||
ESQLColumn,
|
||||
ESQLLiteral,
|
||||
ESQLSource,
|
||||
ESQLTimeInterval,
|
||||
} from '../types';
|
||||
|
||||
const regexUnquotedIdPattern = /^([a-z\*_\@]{1})[a-z0-9_\*]*$/i;
|
||||
|
||||
|
@ -84,4 +91,27 @@ export const LeafPrinter = {
|
|||
return `${quantity} ${unit}`;
|
||||
}
|
||||
},
|
||||
|
||||
comment: (node: ESQLAstComment): string => {
|
||||
switch (node.subtype) {
|
||||
case 'single-line': {
|
||||
return `//${node.text}`;
|
||||
}
|
||||
case 'multi-line': {
|
||||
return `/*${node.text}*/`;
|
||||
}
|
||||
default: {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
commentList: (comments: ESQLAstCommentMultiLine[]): string => {
|
||||
let text = '';
|
||||
for (const comment of comments) {
|
||||
const commentText = LeafPrinter.comment(comment);
|
||||
if (commentText) text += (text ? ' ' : '') + commentText;
|
||||
}
|
||||
return text;
|
||||
},
|
||||
};
|
||||
|
|
|
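Illustrative expectations for the new comment leaf printers; the node literals below are hypothetical:

```ts
import { LeafPrinter } from './leaf_printer';

const location = { min: 0, max: 0 }; // placeholder offsets

// Single-line comments are re-emitted with the `//` prefix.
LeafPrinter.comment({ type: 'comment', subtype: 'single-line', text: ' note', location });
// => '// note'

// Multi-line comments are wrapped back into `/* ... */`.
LeafPrinter.comment({ type: 'comment', subtype: 'multi-line', text: ' note ', location });
// => '/* note */'

// Lists of multi-line comments are joined with a single space.
LeafPrinter.commentList([
  { type: 'comment', subtype: 'multi-line', text: ' a ', location },
  { type: 'comment', subtype: 'multi-line', text: ' b ', location },
]);
// => '/* a */ /* b */'
```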
@ -9,16 +9,17 @@
|
|||
|
||||
import { BinaryExpressionGroup } from '../ast/constants';
|
||||
import { binaryExpressionGroup, isBinaryExpression } from '../ast/helpers';
|
||||
import type { ESQLAstBaseItem, ESQLAstQueryExpression } from '../types';
|
||||
import {
|
||||
CommandOptionVisitorContext,
|
||||
CommandVisitorContext,
|
||||
ESQLAstQueryNode,
|
||||
ExpressionVisitorContext,
|
||||
FunctionCallExpressionVisitorContext,
|
||||
Visitor,
|
||||
} from '../visitor';
|
||||
import { singleItems } from '../visitor/utils';
|
||||
import { BasicPrettyPrinter, BasicPrettyPrinterOptions } from './basic_pretty_printer';
|
||||
import { getPrettyPrintStats } from './helpers';
|
||||
import { LeafPrinter } from './leaf_printer';
|
||||
|
||||
/**
|
||||
|
@ -51,11 +52,24 @@ interface Input {
|
|||
* ```
|
||||
*/
|
||||
flattenBinExpOfType?: BinaryExpressionGroup;
|
||||
|
||||
/**
|
||||
* Suffix text to append to the formatted output, before any comment
|
||||
* decorations.
|
||||
*/
|
||||
suffix?: string;
|
||||
}
|
||||
|
||||
interface Output {
|
||||
txt: string;
|
||||
lines?: number;
|
||||
|
||||
/**
|
||||
* Whether the node is returned already indented. For example, when the
|
||||
* node has line-breaking decorations (multi-line comments), the node and
|
||||
* its decorations are returned already indented.
|
||||
*/
|
||||
indented?: boolean;
|
||||
}
|
||||
|
||||
export interface WrappingPrettyPrinterOptions extends BasicPrettyPrinterOptions {
|
||||
|
@ -96,7 +110,7 @@ export interface WrappingPrettyPrinterOptions extends BasicPrettyPrinterOptions
|
|||
|
||||
export class WrappingPrettyPrinter {
|
||||
public static readonly print = (
|
||||
query: ESQLAstQueryNode,
|
||||
query: ESQLAstQueryExpression,
|
||||
opts?: WrappingPrettyPrinterOptions
|
||||
): string => {
|
||||
const printer = new WrappingPrettyPrinter(opts);
|
||||
|
@ -138,8 +152,12 @@ export class WrappingPrettyPrinter {
|
|||
const groupLeft = binaryExpressionGroup(left);
|
||||
const groupRight = binaryExpressionGroup(right);
|
||||
const continueVerticalFlattening = group && inp.flattenBinExpOfType === group;
|
||||
const suffix = inp.suffix ?? '';
|
||||
const oneArgumentPerLine =
|
||||
getPrettyPrintStats(left).hasLineBreakingDecorations ||
|
||||
getPrettyPrintStats(right).hasLineBreakingDecorations;
|
||||
|
||||
if (continueVerticalFlattening) {
|
||||
if (continueVerticalFlattening || oneArgumentPerLine) {
|
||||
const parent = ctx.parent?.node;
|
||||
const isLeftChild = isBinaryExpression(parent) && parent.args[0] === node;
|
||||
const leftInput: Input = {
|
||||
|
@ -147,17 +165,25 @@ export class WrappingPrettyPrinter {
|
|||
remaining: inp.remaining,
|
||||
flattenBinExpOfType: group,
|
||||
};
|
||||
const rightTab = isLeftChild ? this.opts.tab : '';
|
||||
const rightIndent = inp.indent + rightTab + (oneArgumentPerLine ? this.opts.tab : '');
|
||||
const rightInput: Input = {
|
||||
indent: inp.indent + this.opts.tab,
|
||||
indent: rightIndent,
|
||||
remaining: inp.remaining - this.opts.tab.length,
|
||||
flattenBinExpOfType: group,
|
||||
};
|
||||
const leftOut = ctx.visitArgument(0, leftInput);
|
||||
const rightOut = ctx.visitArgument(1, rightInput);
|
||||
const rightTab = isLeftChild ? this.opts.tab : '';
|
||||
const txt = `${leftOut.txt} ${operator}\n${inp.indent}${rightTab}${rightOut.txt}`;
|
||||
|
||||
return { txt };
|
||||
let txt = `${leftOut.txt} ${operator}\n`;
|
||||
|
||||
if (!rightOut.indented) {
|
||||
txt += rightIndent;
|
||||
}
|
||||
|
||||
txt += rightOut.txt + suffix;
|
||||
|
||||
return { txt, indented: leftOut.indented };
|
||||
}
|
||||
|
||||
let txt: string = '';
|
||||
|
@ -175,8 +201,10 @@ export class WrappingPrettyPrinter {
|
|||
const length = leftFormatted.length + rightFormatted.length + operator.length + 2;
|
||||
const fitsOnOneLine = length <= inp.remaining;
|
||||
|
||||
let indented = false;
|
||||
|
||||
if (fitsOnOneLine) {
|
||||
txt = `${leftFormatted} ${operator} ${rightFormatted}`;
|
||||
txt = `${leftFormatted} ${operator} ${rightFormatted}${suffix}`;
|
||||
} else {
|
||||
const flattenVertically = group === groupLeft || group === groupRight;
|
||||
const flattenBinExpOfType = flattenVertically ? group : undefined;
|
||||
|
@ -193,10 +221,17 @@ export class WrappingPrettyPrinter {
|
|||
const leftOut = ctx.visitArgument(0, leftInput);
|
||||
const rightOut = ctx.visitArgument(1, rightInput);
|
||||
|
||||
txt = `${leftOut.txt} ${operator}\n${inp.indent}${this.opts.tab}${rightOut.txt}`;
|
||||
txt = `${leftOut.txt} ${operator}\n`;
|
||||
|
||||
if (!rightOut.indented) {
|
||||
txt += `${inp.indent}${this.opts.tab}`;
|
||||
}
|
||||
|
||||
txt += `${rightOut.txt}${suffix}`;
|
||||
indented = leftOut.indented;
|
||||
}
|
||||
|
||||
return { txt };
|
||||
return { txt, indented };
|
||||
}
|
||||
|
||||
private printArguments(
|
||||
|
@ -212,53 +247,63 @@ export class WrappingPrettyPrinter {
|
|||
let remainingCurrentLine = inp.remaining;
|
||||
let oneArgumentPerLine = false;
|
||||
|
||||
ARGS: for (const arg of singleItems(ctx.arguments())) {
|
||||
if (arg.type === 'option') {
|
||||
continue;
|
||||
for (const child of singleItems(ctx.node.args)) {
|
||||
if (getPrettyPrintStats(child).hasLineBreakingDecorations) {
|
||||
oneArgumentPerLine = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const formattedArg = BasicPrettyPrinter.expression(arg, this.opts);
|
||||
const formattedArgLength = formattedArg.length;
|
||||
const needsWrap = remainingCurrentLine < formattedArgLength;
|
||||
if (formattedArgLength > largestArg) {
|
||||
largestArg = formattedArgLength;
|
||||
}
|
||||
let separator = txt ? ',' : '';
|
||||
let fragment = '';
|
||||
|
||||
if (needsWrap) {
|
||||
separator +=
|
||||
'\n' +
|
||||
inp.indent +
|
||||
this.opts.tab +
|
||||
(ctx instanceof CommandVisitorContext ? this.opts.commandTab : '');
|
||||
fragment = separator + formattedArg;
|
||||
lines++;
|
||||
if (argsPerLine > maxArgsPerLine) {
|
||||
maxArgsPerLine = argsPerLine;
|
||||
if (!oneArgumentPerLine) {
|
||||
ARGS: for (const arg of singleItems(ctx.arguments())) {
|
||||
if (arg.type === 'option') {
|
||||
continue;
|
||||
}
|
||||
if (argsPerLine < minArgsPerLine) {
|
||||
minArgsPerLine = argsPerLine;
|
||||
if (minArgsPerLine < 2) {
|
||||
oneArgumentPerLine = true;
|
||||
break ARGS;
|
||||
|
||||
const formattedArg = BasicPrettyPrinter.expression(arg, this.opts);
|
||||
const formattedArgLength = formattedArg.length;
|
||||
const needsWrap = remainingCurrentLine < formattedArgLength;
|
||||
if (formattedArgLength > largestArg) {
|
||||
largestArg = formattedArgLength;
|
||||
}
|
||||
let separator = txt ? ',' : '';
|
||||
let fragment = '';
|
||||
|
||||
if (needsWrap) {
|
||||
separator +=
|
||||
'\n' +
|
||||
inp.indent +
|
||||
this.opts.tab +
|
||||
(ctx instanceof CommandVisitorContext ? this.opts.commandTab : '');
|
||||
fragment = separator + formattedArg;
|
||||
lines++;
|
||||
if (argsPerLine > maxArgsPerLine) {
|
||||
maxArgsPerLine = argsPerLine;
|
||||
}
|
||||
if (argsPerLine < minArgsPerLine) {
|
||||
minArgsPerLine = argsPerLine;
|
||||
if (minArgsPerLine < 2) {
|
||||
oneArgumentPerLine = true;
|
||||
break ARGS;
|
||||
}
|
||||
}
|
||||
remainingCurrentLine =
|
||||
inp.remaining - formattedArgLength - this.opts.tab.length - this.opts.commandTab.length;
|
||||
argsPerLine = 1;
|
||||
} else {
|
||||
argsPerLine++;
|
||||
fragment = separator + (separator ? ' ' : '') + formattedArg;
|
||||
remainingCurrentLine -= fragment.length;
|
||||
}
|
||||
remainingCurrentLine =
|
||||
inp.remaining - formattedArgLength - this.opts.tab.length - this.opts.commandTab.length;
|
||||
argsPerLine = 1;
|
||||
} else {
|
||||
argsPerLine++;
|
||||
fragment = separator + (separator ? ' ' : '') + formattedArg;
|
||||
remainingCurrentLine -= fragment.length;
|
||||
txt += fragment;
|
||||
}
|
||||
txt += fragment;
|
||||
}
|
||||
|
||||
let indent = inp.indent + this.opts.tab;
|
||||
|
||||
if (ctx instanceof CommandVisitorContext) {
|
||||
const isFirstCommand = (ctx.parent?.node as ESQLAstQueryNode)?.[0] === ctx.node;
|
||||
const isFirstCommand =
|
||||
(ctx.parent?.node as ESQLAstQueryExpression)?.commands?.[0] === ctx.node;
|
||||
if (!isFirstCommand) {
|
||||
indent += this.opts.commandTab;
|
||||
}
|
||||
|
@ -266,48 +311,146 @@ export class WrappingPrettyPrinter {
|
|||
|
||||
if (oneArgumentPerLine) {
|
||||
lines = 1;
|
||||
txt = ctx instanceof CommandVisitorContext ? indent : '\n' + indent;
|
||||
let i = 0;
|
||||
for (const arg of ctx.visitArguments({
|
||||
indent,
|
||||
remaining: this.opts.wrap - indent.length,
|
||||
})) {
|
||||
txt = ctx instanceof CommandVisitorContext ? '' : '\n';
|
||||
const args = [...ctx.arguments()].filter((arg) => {
|
||||
if (arg.type === 'option') return arg.name === 'as';
|
||||
return true;
|
||||
});
|
||||
const length = args.length;
|
||||
const last = length - 1;
|
||||
for (let i = 0; i <= last; i++) {
|
||||
const isFirstArg = i === 0;
|
||||
const separator = isFirstArg ? '' : ',\n' + indent;
|
||||
txt += separator + arg.txt;
|
||||
const isLastArg = i === last;
|
||||
const arg = ctx.visitExpression(args[i], {
|
||||
indent,
|
||||
remaining: this.opts.wrap - indent.length,
|
||||
suffix: isLastArg ? '' : ',',
|
||||
});
|
||||
const separator = isFirstArg ? '' : '\n';
|
||||
const indentation = arg.indented ? '' : indent;
|
||||
txt += separator + indentation + arg.txt;
|
||||
lines++;
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
return { txt, lines, indent, oneArgumentPerLine };
|
||||
}
|
||||
|
||||
protected printTopDecorations(indent: string, node: ESQLAstBaseItem): string {
|
||||
const formatting = node.formatting;
|
||||
|
||||
if (!formatting || !formatting.top || !formatting.top.length) {
|
||||
return '';
|
||||
}
|
||||
|
||||
let txt = '';
|
||||
|
||||
for (const decoration of formatting.top) {
|
||||
if (decoration.type === 'comment') {
|
||||
txt += indent + LeafPrinter.comment(decoration) + '\n';
|
||||
}
|
||||
}
|
||||
|
||||
return txt;
|
||||
}
|
||||
|
||||
protected decorateWithComments(
|
||||
indent: string,
|
||||
node: ESQLAstBaseItem,
|
||||
txt: string,
|
||||
indented: boolean = false
|
||||
): { txt: string; indented: boolean } {
|
||||
const formatting = node.formatting;
|
||||
|
||||
if (!formatting) {
|
||||
return { txt, indented };
|
||||
}
|
||||
|
||||
if (formatting.left) {
|
||||
const comments = LeafPrinter.commentList(formatting.left);
|
||||
|
||||
if (comments) {
|
||||
indented = true;
|
||||
txt = `${indent}${comments} ${txt}`;
|
||||
}
|
||||
}
|
||||
|
||||
if (formatting.top) {
|
||||
const top = formatting.top;
|
||||
const length = top.length;
|
||||
|
||||
for (let i = length - 1; i >= 0; i--) {
|
||||
const decoration = top[i];
|
||||
|
||||
if (decoration.type === 'comment') {
|
||||
if (!indented) {
|
||||
txt = indent + txt;
|
||||
indented = true;
|
||||
}
|
||||
txt = indent + LeafPrinter.comment(decoration) + '\n' + txt;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (formatting.right) {
|
||||
const comments = LeafPrinter.commentList(formatting.right);
|
||||
|
||||
if (comments) {
|
||||
txt = `${txt} ${comments}`;
|
||||
}
|
||||
}
|
||||
|
||||
if (formatting.rightSingleLine) {
|
||||
const comment = LeafPrinter.comment(formatting.rightSingleLine);
|
||||
|
||||
txt += ` ${comment}`;
|
||||
}
|
||||
|
||||
if (formatting.bottom) {
|
||||
for (const decoration of formatting.bottom) {
|
||||
if (decoration.type === 'comment') {
|
||||
indented = true;
|
||||
txt = txt + '\n' + indent + LeafPrinter.comment(decoration);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { txt, indented };
|
||||
}
|
||||
|
||||
protected readonly visitor = new Visitor()
|
||||
.on('visitExpression', (ctx, inp: Input): Output => {
|
||||
const txt = ctx.node.text ?? '<EXPRESSION>';
|
||||
return { txt };
|
||||
})
|
||||
|
||||
.on(
|
||||
'visitSourceExpression',
|
||||
(ctx, inp: Input): Output => ({ txt: LeafPrinter.source(ctx.node) })
|
||||
)
|
||||
.on('visitSourceExpression', (ctx, inp: Input): Output => {
|
||||
const formatted = LeafPrinter.source(ctx.node) + (inp.suffix ?? '');
|
||||
const { txt, indented } = this.decorateWithComments(inp.indent, ctx.node, formatted);
|
||||
|
||||
.on(
|
||||
'visitColumnExpression',
|
||||
(ctx, inp: Input): Output => ({ txt: LeafPrinter.column(ctx.node) })
|
||||
)
|
||||
return { txt, indented };
|
||||
})
|
||||
|
||||
.on(
|
||||
'visitLiteralExpression',
|
||||
(ctx, inp: Input): Output => ({ txt: LeafPrinter.literal(ctx.node) })
|
||||
)
|
||||
.on('visitColumnExpression', (ctx, inp: Input): Output => {
|
||||
const formatted = LeafPrinter.column(ctx.node) + (inp.suffix ?? '');
|
||||
const { txt, indented } = this.decorateWithComments(inp.indent, ctx.node, formatted);
|
||||
|
||||
.on(
|
||||
'visitTimeIntervalLiteralExpression',
|
||||
(ctx, inp: Input): Output => ({ txt: LeafPrinter.timeInterval(ctx.node) })
|
||||
)
|
||||
return { txt, indented };
|
||||
})
|
||||
|
||||
.on('visitLiteralExpression', (ctx, inp: Input): Output => {
|
||||
const formatted = LeafPrinter.literal(ctx.node) + (inp.suffix ?? '');
|
||||
const { txt, indented } = this.decorateWithComments(inp.indent, ctx.node, formatted);
|
||||
|
||||
return { txt, indented };
|
||||
})
|
||||
|
||||
.on('visitTimeIntervalLiteralExpression', (ctx, inp: Input): Output => {
|
||||
const formatted = LeafPrinter.timeInterval(ctx.node) + (inp.suffix ?? '');
|
||||
const { txt, indented } = this.decorateWithComments(inp.indent, ctx.node, formatted);
|
||||
|
||||
return { txt, indented };
|
||||
})
|
||||
|
||||
.on('visitInlineCastExpression', (ctx, inp: Input): Output => {
|
||||
const value = ctx.value();
|
||||
|
@ -326,26 +469,36 @@ export class WrappingPrettyPrinter {
|
|||
valueFormatted = `(${valueFormatted})`;
|
||||
}
|
||||
|
||||
const txt = `${valueFormatted}::${ctx.node.castType}`;
|
||||
const formatted = `${valueFormatted}::${ctx.node.castType}${inp.suffix ?? ''}`;
|
||||
const { txt, indented } = this.decorateWithComments(inp.indent, ctx.node, formatted);
|
||||
|
||||
return { txt };
|
||||
return { txt, indented };
|
||||
})
|
||||
|
||||
.on('visitRenameExpression', (ctx, inp: Input): Output => {
|
||||
const operator = this.keyword('AS');
|
||||
const expression = this.visitBinaryExpression(ctx, operator, inp);
|
||||
const { txt, indented } = this.decorateWithComments(
|
||||
inp.indent,
|
||||
ctx.node,
|
||||
expression.txt,
|
||||
expression.indented
|
||||
);
|
||||
|
||||
return this.visitBinaryExpression(ctx, operator, inp);
|
||||
return { txt, indented };
|
||||
})
|
||||
|
||||
.on('visitListLiteralExpression', (ctx, inp: Input): Output => {
|
||||
let elements = '';
|
||||
|
||||
for (const out of ctx.visitElements()) {
|
||||
for (const out of ctx.visitElements(inp)) {
|
||||
elements += (elements ? ', ' : '') + out.txt;
|
||||
}
|
||||
|
||||
const txt = `[${elements}]`;
|
||||
return { txt };
|
||||
const formatted = `[${elements}]${inp.suffix ?? ''}`;
|
||||
const { txt, indented } = this.decorateWithComments(inp.indent, ctx.node, formatted);
|
||||
|
||||
return { txt, indented };
|
||||
})
|
||||
|
||||
.on('visitFunctionCallExpression', (ctx, inp: Input): Output => {
|
||||
|
@ -363,7 +516,8 @@ export class WrappingPrettyPrinter {
|
|||
break;
|
||||
}
|
||||
case 'postfix-unary-expression': {
|
||||
txt = `${ctx.visitArgument(0, inp).txt} ${operator}`;
|
||||
const suffix = inp.suffix ?? '';
|
||||
txt = `${ctx.visitArgument(0, { ...inp, suffix: '' }).txt} ${operator}${suffix}`;
|
||||
break;
|
||||
}
|
||||
case 'binary-expression': {
|
||||
|
@ -375,7 +529,19 @@ export class WrappingPrettyPrinter {
|
|||
remaining: inp.remaining - operator.length - 1,
|
||||
});
|
||||
|
||||
txt = `${operator}(${args.txt})`;
|
||||
let breakClosingParenthesis = false;
|
||||
|
||||
if (getPrettyPrintStats(ctx.node).hasRightSingleLineComments) {
|
||||
breakClosingParenthesis = true;
|
||||
}
|
||||
|
||||
let closingParenthesisFormatted = ')';
|
||||
|
||||
if (breakClosingParenthesis) {
|
||||
closingParenthesisFormatted = '\n' + inp.indent + ')';
|
||||
}
|
||||
|
||||
txt = `${operator}(${args.txt}${closingParenthesisFormatted}${inp.suffix ?? ''}`;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -433,6 +599,7 @@ export class WrappingPrettyPrinter {
|
|||
const optionsWithWhitespace = options
|
||||
? `${breakOptions ? '\n' + optionIndent : ' '}${options}`
|
||||
: '';
|
||||
|
||||
const txt = `${cmd}${argsWithWhitespace}${optionsWithWhitespace}`;
|
||||
|
||||
return { txt, lines: args.lines /* add options lines count */ };
|
||||
|
@ -441,9 +608,18 @@ export class WrappingPrettyPrinter {
|
|||
.on('visitQuery', (ctx) => {
|
||||
const opts = this.opts;
|
||||
const indent = opts.indent ?? '';
|
||||
const commandCount = ctx.node.length;
|
||||
const commands = ctx.node.commands;
|
||||
const commandCount = commands.length;
|
||||
|
||||
let multiline = opts.multiline ?? commandCount > 3;
|
||||
|
||||
if (!multiline) {
|
||||
const stats = getPrettyPrintStats(ctx.node);
|
||||
if (stats.hasLineBreakingDecorations) {
|
||||
multiline = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!multiline) {
|
||||
const oneLine = indent + BasicPrettyPrinter.print(ctx.node, opts);
|
||||
if (oneLine.length <= opts.wrap) {
|
||||
|
@ -454,18 +630,37 @@ export class WrappingPrettyPrinter {
|
|||
}
|
||||
|
||||
let text = indent;
|
||||
const cmdSeparator = multiline ? `\n${indent}${opts.pipeTab ?? ' '}| ` : ' | ';
|
||||
const pipedCommandIndent = `${indent}${opts.pipeTab ?? ' '}`;
|
||||
const cmdSeparator = multiline ? `${pipedCommandIndent}| ` : ' | ';
|
||||
let i = 0;
|
||||
let prevOut: Output | undefined;
|
||||
|
||||
for (const out of ctx.visitCommands({ indent, remaining: opts.wrap - indent.length })) {
|
||||
const isFirstCommand = i === 0;
|
||||
const isSecondCommand = i === 1;
|
||||
|
||||
if (isSecondCommand) {
|
||||
const firstCommandIsMultiline = prevOut?.lines && prevOut.lines > 1;
|
||||
if (firstCommandIsMultiline) text += '\n' + indent;
|
||||
}
|
||||
const isFirstCommand = i === 0;
|
||||
if (!isFirstCommand) text += cmdSeparator;
|
||||
|
||||
const commandIndent = isFirstCommand ? indent : pipedCommandIndent;
|
||||
const topDecorations = this.printTopDecorations(commandIndent, commands[i]);
|
||||
|
||||
if (topDecorations) {
|
||||
if (!isFirstCommand) {
|
||||
text += '\n';
|
||||
}
|
||||
text += topDecorations;
|
||||
}
|
||||
|
||||
if (!isFirstCommand) {
|
||||
if (multiline && !topDecorations) {
|
||||
text += '\n';
|
||||
}
|
||||
text += cmdSeparator;
|
||||
}
|
||||
|
||||
text += out.txt;
|
||||
i++;
|
||||
prevOut = out;
|
||||
|
@ -474,7 +669,7 @@ export class WrappingPrettyPrinter {
|
|||
return text;
|
||||
});
|
||||
|
||||
public print(query: ESQLAstQueryNode) {
|
||||
public print(query: ESQLAstQueryExpression) {
|
||||
return this.visitor.visitQuery(query);
|
||||
}
|
||||
}
|
||||
|
|
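A minimal sketch of the wrapping printer with a top comment, mirroring the tests earlier in this diff; import paths and the `withComments` option are assumptions:

```ts
import { parse } from '../parser';
import { WrappingPrettyPrinter } from './wrapping_pretty_printer';

const { root } = parse(
  `ROW
  // One is important here
  1 + 2`,
  { withComments: true }
);

// A top comment is a line-breaking decoration, so the printer switches to
// multi-line mode and re-emits the comment above its operand.
console.log(WrappingPrettyPrinter.print(root, { wrap: 80 }));
```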
10
packages/kbn-esql-ast/src/query/index.ts
Normal file
|
@ -0,0 +1,10 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
export { EsqlQuery } from './query';
|
49
packages/kbn-esql-ast/src/query/query.ts
Normal file
|
@ -0,0 +1,49 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import type { Token } from 'antlr4';
|
||||
import { ParseOptions, parse } from '../parser';
|
||||
import type { ESQLAstQueryExpression } from '../types';
|
||||
import {
|
||||
WrappingPrettyPrinter,
|
||||
WrappingPrettyPrinterOptions,
|
||||
} from '../pretty_print/wrapping_pretty_printer';
|
||||
|
||||
export class EsqlQuery {
|
||||
public static readonly fromSrc = (src: string, opts?: ParseOptions): EsqlQuery => {
|
||||
const { root, tokens } = parse(src, opts);
|
||||
return new EsqlQuery(root, src, tokens);
|
||||
};
|
||||
|
||||
constructor(
|
||||
/**
|
||||
* The parsed or programmatically created ES|QL AST. The AST is the only
|
||||
* required property and is the source of truth for the query.
|
||||
*/
|
||||
public readonly ast: ESQLAstQueryExpression,
|
||||
|
||||
/**
|
||||
* Optional source code that was used to generate the AST. Provide this
|
||||
* if the query was created by parsing source code. Otherwise, set it to
|
||||
* an empty string.
|
||||
*/
|
||||
public readonly src: string = '',
|
||||
|
||||
/**
|
||||
* Optional array of ANTLR tokens, in case the query was parsed from a
|
||||
* source code.
|
||||
*/
|
||||
public readonly tokens: Token[] = []
|
||||
) {}
|
||||
|
||||
public print(opts?: WrappingPrettyPrinterOptions): string {
|
||||
const printer = new WrappingPrettyPrinter(opts);
|
||||
return printer.print(this.ast);
|
||||
}
|
||||
}
|
|
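A brief usage sketch of the new `EsqlQuery` wrapper; the query text is illustrative:

```ts
import { EsqlQuery } from './query';

const query = EsqlQuery.fromSrc('FROM index | WHERE language == "javascript" | LIMIT 10');

// The root AST node is an ESQLAstQueryExpression holding the command list.
console.log(query.ast.commands.length); // 3

// Pretty-printing delegates to the WrappingPrettyPrinter.
console.log(query.print());
```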
@ -11,21 +11,23 @@ export type ESQLAst = ESQLAstCommand[];
|
|||
|
||||
export type ESQLAstCommand = ESQLCommand | ESQLAstMetricsCommand;
|
||||
|
||||
export type ESQLAstNode = ESQLAstCommand | ESQLAstItem;
|
||||
export type ESQLAstNode = ESQLAstCommand | ESQLAstExpression | ESQLAstItem;
|
||||
|
||||
/**
|
||||
* Represents an *expression* in the AST.
|
||||
*/
|
||||
export type ESQLAstExpression = ESQLSingleAstItem | ESQLAstQueryExpression;
|
||||
|
||||
export type ESQLSingleAstItem =
|
||||
| ESQLFunction // "function call expression"
|
||||
| ESQLFunction
|
||||
| ESQLCommandOption
|
||||
| ESQLSource // "source identifier expression"
|
||||
| ESQLColumn // "field identifier expression"
|
||||
| ESQLSource
|
||||
| ESQLColumn
|
||||
| ESQLTimeInterval
|
||||
| ESQLList // "list expression"
|
||||
| ESQLLiteral // "literal expression"
|
||||
| ESQLList
|
||||
| ESQLLiteral
|
||||
| ESQLCommandMode
|
||||
| ESQLInlineCast // "inline cast expression"
|
||||
| ESQLInlineCast
|
||||
| ESQLOrderExpression
|
||||
| ESQLUnknownItem;
|
||||
|
||||
|
@ -44,7 +46,7 @@ export type ESQLAstNodeWithArgs = ESQLCommand | ESQLCommandOption | ESQLFunction
|
|||
* of the nodes which are plain arrays, all nodes will be *proper* and we can
|
||||
* remove this type.
|
||||
*/
|
||||
export type ESQLProperNode = ESQLSingleAstItem | ESQLAstCommand;
|
||||
export type ESQLProperNode = ESQLAstExpression | ESQLAstCommand;
|
||||
|
||||
export interface ESQLLocation {
|
||||
min: number;
|
||||
|
@ -56,6 +58,18 @@ export interface ESQLAstBaseItem<Name = string> {
|
|||
text: string;
|
||||
location: ESQLLocation;
|
||||
incomplete: boolean;
|
||||
formatting?: ESQLAstNodeFormatting;
|
||||
}
|
||||
|
||||
/**
|
||||
* Contains optional formatting information used by the pretty printer.
|
||||
*/
|
||||
export interface ESQLAstNodeFormatting {
|
||||
top?: ESQLAstComment[];
|
||||
left?: ESQLAstCommentMultiLine[];
|
||||
right?: ESQLAstCommentMultiLine[];
|
||||
rightSingleLine?: ESQLAstCommentSingleLine;
|
||||
bottom?: ESQLAstComment[];
|
||||
}
|
||||
|
||||
export interface ESQLCommand<Name = string> extends ESQLAstBaseItem<Name> {
|
||||
|
@ -86,6 +100,11 @@ export interface ESQLCommandMode extends ESQLAstBaseItem {
|
|||
type: 'mode';
|
||||
}
|
||||
|
||||
export interface ESQLAstQueryExpression extends ESQLAstBaseItem<''> {
|
||||
type: 'query';
|
||||
commands: ESQLAstCommand[];
|
||||
}
|
||||
|
||||
/**
|
||||
* We coalesce all function calls and expressions into a single "function"
|
||||
* node type. This subtype is used to distinguish between different types
|
||||
|
@ -356,3 +375,14 @@ export interface EditorError {
|
|||
code?: string;
|
||||
severity: 'error' | 'warning' | number;
|
||||
}
|
||||
|
||||
export interface ESQLAstGenericComment<SubType extends 'single-line' | 'multi-line'> {
|
||||
type: 'comment';
|
||||
subtype: SubType;
|
||||
text: string;
|
||||
location: ESQLLocation;
|
||||
}
|
||||
|
||||
export type ESQLAstCommentSingleLine = ESQLAstGenericComment<'single-line'>;
|
||||
export type ESQLAstCommentMultiLine = ESQLAstGenericComment<'multi-line'>;
|
||||
export type ESQLAstComment = ESQLAstCommentSingleLine | ESQLAstCommentMultiLine;
|
||||
|
|
|
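A hypothetical example of the `formatting` metadata a parsed node might carry when comments are collected; the offsets are made up:

```ts
import type { ESQLAstNodeFormatting } from './types';

const formatting: ESQLAstNodeFormatting = {
  // Comments printed on their own line above the node.
  top: [
    {
      type: 'comment',
      subtype: 'single-line',
      text: ' One is important here',
      location: { min: 4, max: 29 },
    },
  ],
  // Only multi-line comments can sit to the left/right of a node on the same line.
  right: [
    { type: 'comment', subtype: 'multi-line', text: ' inline ', location: { min: 35, max: 46 } },
  ],
};
```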
@ -7,11 +7,11 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors } from '../../ast_parser';
|
||||
import { parse } from '../../parser';
|
||||
import { Visitor } from '../visitor';
|
||||
|
||||
test('"visitExpression" captures all non-captured expressions', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(`
|
||||
const { ast } = parse(`
|
||||
FROM index
|
||||
| STATS 1, "str", [true], a = b BY field
|
||||
| LIMIT 123
|
||||
|
@ -36,7 +36,7 @@ test('"visitExpression" captures all non-captured expressions', () => {
|
|||
|
||||
test('can terminate walk early, does not visit all literals', () => {
|
||||
const numbers: number[] = [];
|
||||
const { ast } = getAstAndSyntaxErrors(`
|
||||
const { ast } = parse(`
|
||||
FROM index
|
||||
| STATS 0, 1, 2, 3
|
||||
| LIMIT 123
|
||||
|
@ -62,7 +62,7 @@ test('can terminate walk early, does not visit all literals', () => {
|
|||
});
|
||||
|
||||
test('"visitColumnExpression" takes over all column visits', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(`
|
||||
const { ast } = parse(`
|
||||
FROM index | STATS a
|
||||
`);
|
||||
const visitor = new Visitor()
|
||||
|
@ -85,7 +85,7 @@ test('"visitColumnExpression" takes over all column visits', () => {
|
|||
});
|
||||
|
||||
test('"visitSourceExpression" takes over all source visits', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(`
|
||||
const { ast } = parse(`
|
||||
FROM index
|
||||
| STATS 1, "str", [true], a = b BY field
|
||||
| LIMIT 123
|
||||
|
@ -110,7 +110,7 @@ test('"visitSourceExpression" takes over all source visits', () => {
|
|||
});
|
||||
|
||||
test('"visitFunctionCallExpression" takes over all literal visits', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(`
|
||||
const { ast } = parse(`
|
||||
FROM index
|
||||
| STATS 1, "str", [true], a = b BY field
|
||||
| LIMIT 123
|
||||
|
@ -135,7 +135,7 @@ test('"visitFunctionCallExpression" takes over all literal visits', () => {
|
|||
});
|
||||
|
||||
test('"visitLiteral" takes over all literal visits', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(`
|
||||
const { ast } = parse(`
|
||||
FROM index
|
||||
| STATS 1, "str", [true], a = b BY field
|
||||
| LIMIT 123
|
||||
|
|
|
@ -14,12 +14,12 @@
|
|||
* visitor to traverse the AST and make changes to it, or how to extract useful
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors } from '../../ast_parser';
|
||||
import { ESQLAstQueryNode } from '../types';
|
||||
import { parse } from '../../parser';
|
||||
import { ESQLAstQueryExpression } from '../../types';
|
||||
import { Visitor } from '../visitor';
|
||||
|
||||
test('change LIMIT from 24 to 42', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(`
|
||||
const { root } = parse(`
|
||||
FROM index
|
||||
| STATS 1, "str", [true], a = b BY field
|
||||
| LIMIT 24
|
||||
|
@ -31,7 +31,7 @@ test('change LIMIT from 24 to 42', () => {
|
|||
.on('visitLimitCommand', (ctx) => ctx.numeric())
|
||||
.on('visitCommand', () => null)
|
||||
.on('visitQuery', (ctx) => [...ctx.visitCommands()])
|
||||
.visitQuery(ast)
|
||||
.visitQuery(root)
|
||||
.filter(Boolean)[0];
|
||||
|
||||
expect(limit()).toBe(24);
|
||||
|
@ -43,7 +43,7 @@ test('change LIMIT from 24 to 42', () => {
|
|||
})
|
||||
.on('visitCommand', () => {})
|
||||
.on('visitQuery', (ctx) => [...ctx.visitCommands()])
|
||||
.visitQuery(ast);
|
||||
.visitQuery(root);
|
||||
|
||||
expect(limit()).toBe(42);
|
||||
});
|
||||
|
@ -56,7 +56,7 @@ test('change LIMIT from 24 to 42', () => {
|
|||
test.todo('can modify sorting orders');
|
||||
|
||||
test('can remove a specific WHERE command', () => {
|
||||
const query = getAstAndSyntaxErrors(`
|
||||
const query = parse(`
|
||||
FROM employees
|
||||
| KEEP first_name, last_name, still_hired
|
||||
| WHERE still_hired == true
|
||||
|
@ -115,7 +115,7 @@ test('can remove a specific WHERE command', () => {
|
|||
expect(print()).toBe('');
|
||||
});
|
||||
|
||||
export const prettyPrint = (ast: ESQLAstQueryNode) =>
|
||||
export const prettyPrint = (ast: ESQLAstQueryExpression | ESQLAstQueryExpression['commands']) =>
|
||||
new Visitor()
|
||||
.on('visitExpression', (ctx) => {
|
||||
return '<EXPRESSION>';
|
||||
|
@ -183,7 +183,7 @@ export const prettyPrint = (ast: ESQLAstQueryNode) =>
|
|||
.visitQuery(ast);
|
||||
|
||||
test('can print a query to text', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(
|
||||
const { ast } = parse(
|
||||
'FROM index METADATA _id, asdf, 123 | STATS fn([1,2], 1d, 1::string, x in (1, 2)), a = b | LIMIT 1000'
|
||||
);
|
||||
const text = prettyPrint(ast);
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
import { getAstAndSyntaxErrors } from '../../ast_parser';
|
||||
import { parse } from '../../parser';
|
||||
import { CommandVisitorContext, WhereCommandVisitorContext } from '../contexts';
|
||||
import { Visitor } from '../visitor';
|
||||
|
||||
|
@ -24,7 +24,7 @@ test('can collect all command names in type safe way', () => {
|
|||
return cmds;
|
||||
});
|
||||
|
||||
const { ast } = getAstAndSyntaxErrors('FROM index | LIMIT 123');
|
||||
const { ast } = parse('FROM index | LIMIT 123');
|
||||
const res = visitor.visitQuery(ast);
|
||||
|
||||
expect(res).toEqual(['from', 'limit']);
|
||||
|
@ -43,16 +43,14 @@ test('can pass inputs to visitors', () => {
|
|||
return cmds;
|
||||
});
|
||||
|
||||
const { ast } = getAstAndSyntaxErrors('FROM index | LIMIT 123');
|
||||
const { ast } = parse('FROM index | LIMIT 123');
|
||||
const res = visitor.visitQuery(ast);
|
||||
|
||||
expect(res).toEqual(['pfx:from', 'pfx:limit']);
|
||||
});
|
||||
|
||||
test('can specify specific visitors for commands', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(
|
||||
'FROM index | SORT asfd | WHERE 1 | ENRICH adsf | LIMIT 123'
|
||||
);
|
||||
const { ast } = parse('FROM index | SORT asfd | WHERE 1 | ENRICH adsf | LIMIT 123');
|
||||
const res = new Visitor()
|
||||
.on('visitWhereCommand', () => 'where')
|
||||
.on('visitSortCommand', () => 'sort')
|
||||
|
@ -65,28 +63,24 @@ test('can specify specific visitors for commands', () => {
|
|||
});
|
||||
|
||||
test('a command can access parent query node', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(
|
||||
'FROM index | SORT asfd | WHERE 1 | ENRICH adsf | LIMIT 123'
|
||||
);
|
||||
const { root } = parse('FROM index | SORT asfd | WHERE 1 | ENRICH adsf | LIMIT 123');
|
||||
new Visitor()
|
||||
.on('visitWhereCommand', (ctx) => {
|
||||
if (ctx.parent!.node !== ast) {
|
||||
if (ctx.parent!.node !== root) {
|
||||
throw new Error('Expected parent to be query node');
|
||||
}
|
||||
})
|
||||
.on('visitCommand', (ctx) => {
|
||||
if (ctx.parent!.node !== ast) {
|
||||
if (ctx.parent!.node !== root) {
|
||||
throw new Error('Expected parent to be query node');
|
||||
}
|
||||
})
|
||||
.on('visitQuery', (ctx) => [...ctx.visitCommands()])
|
||||
.visitQuery(ast);
|
||||
.visitQuery(root);
|
||||
});
|
||||
|
||||
test('specific commands receive specific visitor contexts', () => {
|
||||
const { ast } = getAstAndSyntaxErrors(
|
||||
'FROM index | SORT asfd | WHERE 1 | ENRICH adsf | LIMIT 123'
|
||||
);
|
||||
const { root } = parse('FROM index | SORT asfd | WHERE 1 | ENRICH adsf | LIMIT 123');
|
||||
|
||||
new Visitor()
|
||||
.on('visitWhereCommand', (ctx) => {
|
||||
|
@ -103,7 +97,7 @@ test('specific commands receive specific visitor contexts', () => {
|
|||
}
|
||||
})
|
||||
.on('visitQuery', (ctx) => [...ctx.visitCommands()])
|
||||
.visitQuery(ast);
|
||||
.visitQuery(root);
|
||||
|
||||
new Visitor()
|
||||
.on('visitCommand', (ctx) => {
|
||||
|
@ -115,5 +109,5 @@ test('specific commands receive specific visitor contexts', () => {
|
|||
}
|
||||
})
|
||||
.on('visitQuery', (ctx) => [...ctx.visitCommands()])
|
||||
.visitQuery(ast);
|
||||
.visitQuery(root);
|
||||
});
|
||||
|
|
|
@ -35,7 +35,6 @@ import type {
|
|||
CommandVisitorInput,
|
||||
ESQLAstExpressionNode,
|
||||
ESQLAstQueryNode,
|
||||
ExpressionVisitorInput,
|
||||
ExpressionVisitorOutput,
|
||||
UndefinedToVoid,
|
||||
VisitorAstNode,
|
||||
|
@ -72,7 +71,9 @@ export class VisitorContext<
|
|||
) {}
|
||||
|
||||
public *visitArguments(
|
||||
input: VisitorInput<Methods, 'visitExpression'>
|
||||
input:
|
||||
| VisitorInput<Methods, 'visitExpression'>
|
||||
| (() => VisitorInput<Methods, 'visitExpression'>)
|
||||
): Iterable<VisitorOutput<Methods, 'visitExpression'>> {
|
||||
this.ctx.assertMethodExists('visitExpression');
|
||||
|
||||
|
@ -86,7 +87,12 @@ export class VisitorContext<
|
|||
if (arg.type === 'option' && arg.name !== 'as') {
|
||||
continue;
|
||||
}
|
||||
yield this.visitExpression(arg, input as any);
|
||||
yield this.visitExpression(
|
||||
arg,
|
||||
typeof input === 'function'
|
||||
? (input as () => VisitorInput<Methods, 'visitExpression'>)()
|
||||
: (input as VisitorInput<Methods, 'visitExpression'>)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -94,7 +100,7 @@ export class VisitorContext<
|
|||
const node = this.node;
|
||||
|
||||
if (!isNodeWithArgs(node)) {
|
||||
throw new Error('Node does not have arguments');
|
||||
return [];
|
||||
}
|
||||
|
||||
const args: ESQLAstExpressionNode[] = [];
|
||||
|
@ -148,6 +154,10 @@ export class QueryVisitorContext<
|
|||
Methods extends VisitorMethods = VisitorMethods,
|
||||
Data extends SharedData = SharedData
|
||||
> extends VisitorContext<Methods, Data, ESQLAstQueryNode> {
|
||||
public *commands(): Iterable<ESQLAstCommand> {
|
||||
yield* this.node.commands;
|
||||
}
|
||||
|
||||
public *visitCommands(
|
||||
input: UndefinedToVoid<Parameters<NonNullable<Methods['visitCommand']>>[1]>
|
||||
): Iterable<
|
||||
|
@ -156,7 +166,7 @@ export class QueryVisitorContext<
|
|||
> {
|
||||
this.ctx.assertMethodExists('visitCommand');
|
||||
|
||||
for (const cmd of this.node) {
|
||||
for (const cmd of this.node.commands) {
|
||||
yield this.visitCommand(cmd, input as any);
|
||||
}
|
||||
}
|
||||
|
@ -337,7 +347,7 @@ export class LimitCommandVisitorContext<
|
|||
}
|
||||
|
||||
public setLimit(value: number): void {
|
||||
const literalNode = Builder.numericLiteral({ value });
|
||||
const literalNode = Builder.expression.literal.numeric({ value, literalType: 'integer' });
|
||||
|
||||
this.node.args = [literalNode];
|
||||
}
|
||||
|
@ -504,12 +514,14 @@ export class ListLiteralExpressionVisitorContext<
|
|||
Node extends ESQLList = ESQLList
|
||||
> extends ExpressionVisitorContext<Methods, Data, Node> {
|
||||
public *visitElements(
|
||||
input: ExpressionVisitorInput<Methods>
|
||||
input:
|
||||
| VisitorInput<Methods, 'visitExpression'>
|
||||
| (() => VisitorInput<Methods, 'visitExpression'>)
|
||||
): Iterable<ExpressionVisitorOutput<Methods>> {
|
||||
this.ctx.assertMethodExists('visitExpression');
|
||||
|
||||
for (const value of this.node.values) {
|
||||
yield this.visitExpression(value, input as any);
|
||||
yield this.visitExpression(value, typeof input === 'function' ? (input as any)() : input);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,10 +12,9 @@ import type * as ast from '../types';
|
|||
import type * as contexts from './contexts';
|
||||
|
||||
/**
|
||||
* We don't have a dedicated "query" AST node, so - for now - we use the root
|
||||
* array of commands as the "query" node.
|
||||
* @deprecated Use `ESQLAstQueryExpression` directly.
|
||||
*/
|
||||
export type ESQLAstQueryNode = ast.ESQLAst;
|
||||
export type ESQLAstQueryNode = ast.ESQLAstQueryExpression;
|
||||
|
||||
/**
|
||||
* Represents an "expression" node in the AST.
|
||||
|
@ -26,7 +25,7 @@ export type ESQLAstExpressionNode = ast.ESQLSingleAstItem;
|
|||
/**
|
||||
* All possible AST nodes supported by the visitor.
|
||||
*/
|
||||
export type VisitorAstNode = ESQLAstQueryNode | ast.ESQLAstNode;
|
||||
export type VisitorAstNode = ast.ESQLAstQueryExpression | ast.ESQLAstNode;
|
||||
|
||||
export type Visitor<Ctx extends contexts.VisitorContext, Input = unknown, Output = unknown> = (
|
||||
ctx: Ctx,
|
||||
|
|
|
@ -14,11 +14,11 @@ import type {
|
|||
AstNodeToVisitorName,
|
||||
EnsureFunction,
|
||||
ESQLAstExpressionNode,
|
||||
ESQLAstQueryNode,
|
||||
UndefinedToVoid,
|
||||
VisitorMethods,
|
||||
} from './types';
|
||||
import { ESQLCommand } from '../types';
|
||||
import type { ESQLAstQueryExpression, ESQLCommand, ESQLProperNode } from '../types';
|
||||
import { Builder } from '../builder';
|
||||
|
||||
export interface VisitorOptions<
|
||||
Methods extends VisitorMethods = VisitorMethods,
|
||||
|
@ -32,6 +32,149 @@ export class Visitor<
|
|||
Methods extends VisitorMethods = VisitorMethods,
|
||||
Data extends SharedData = SharedData
|
||||
> {
|
||||
/**
|
||||
* Finds the most specific node immediately after the given position. If the
|
||||
* position is inside a node, it will return the node itself. If no node is
|
||||
* found, it returns `null`.
|
||||
*
|
||||
* @param ast ES|QL AST
|
||||
* @param pos Offset position in the source text
|
||||
* @returns The node at or after the given position
|
||||
*/
|
||||
public static readonly findNodeAtOrAfter = (
|
||||
ast: ESQLAstQueryExpression,
|
||||
pos: number
|
||||
): ESQLProperNode | null => {
|
||||
return new Visitor()
|
||||
.on('visitExpression', (ctx): ESQLProperNode | null => {
|
||||
const node = ctx.node;
|
||||
const location = node.location;
|
||||
if (!location) return null;
|
||||
const isBefore = location.min > pos;
|
||||
let isFirstChild = true;
|
||||
for (const child of ctx.arguments()) {
|
||||
const { location: childLocation } = child;
|
||||
if (!childLocation) continue;
|
||||
if (isFirstChild) {
|
||||
isFirstChild = false;
|
||||
if (isBefore) {
|
||||
const isChildAtOffset =
|
||||
ctx.node.location && ctx.node.location.min < childLocation.min;
|
||||
if (isChildAtOffset) return node;
|
||||
return ctx.visitExpression(child, undefined) || child;
|
||||
}
|
||||
}
|
||||
const isInsideChild = childLocation.min <= pos && childLocation.max >= pos;
|
||||
if (isInsideChild) return ctx.visitExpression(child, undefined);
|
||||
const isBeforeChild = childLocation.min > pos;
|
||||
if (isBeforeChild) {
|
||||
return ctx.visitExpression(child, undefined) || child;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.on('visitCommand', (ctx): ESQLProperNode | null => {
|
||||
for (const child of ctx.arguments()) {
|
||||
const { location: childLocation } = child;
|
||||
if (!childLocation) continue;
|
||||
const isInsideChild = childLocation.min <= pos && childLocation.max >= pos;
|
||||
if (isInsideChild) return ctx.visitExpression(child);
|
||||
const isBeforeChild = childLocation.min > pos;
|
||||
if (isBeforeChild) {
|
||||
return ctx.visitExpression(child) || child;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.on('visitQuery', (ctx): ESQLProperNode | null => {
|
||||
for (const node of ctx.commands()) {
|
||||
const { location } = node;
|
||||
if (!location) continue;
|
||||
const isInside = location.min <= pos && location.max >= pos;
|
||||
if (isInside) return ctx.visitCommand(node);
|
||||
const isBefore = location.min > pos;
|
||||
if (isBefore) return node;
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.visitQuery(ast);
|
||||
};
|
||||
|
||||
/**
|
||||
* Finds the most specific node immediately before the given position. If the
|
||||
* position is inside a node, it will return the node itself. If no node is
|
||||
* found, it returns `null`.
|
||||
*
|
||||
* @param ast ES|QL AST
|
||||
* @param pos Offset position in the source text
|
||||
* @returns The node at or before the given position
|
||||
*/
|
||||
public static readonly findNodeAtOrBefore = (
|
ast: ESQLAstQueryExpression,
pos: number
): ESQLProperNode | null => {
return new Visitor()
.on('visitExpression', (ctx): ESQLProperNode | null => {
const nodeLocation = ctx.node.location;
const nodes = [...ctx.arguments()];

if (nodeLocation && nodeLocation.max < pos) {
const last = nodes[nodes.length - 1];
if (last && last.location && last.location.max === nodeLocation.max) {
return ctx.visitExpression(last, undefined) || last;
} else {
return ctx.node;
}
}

for (let i = nodes.length - 1; i >= 0; i--) {
const node = nodes[i];
const { location } = node;
if (!location) continue;
const isInside = location.min <= pos && location.max >= pos;
if (isInside) return ctx.visitExpression(node, undefined);
const isAfter = location.max < pos;
if (isAfter) {
return ctx.visitExpression(node, undefined) || node;
}
}

return null;
})
.on('visitCommand', (ctx): ESQLProperNode | null => {
const nodes = [...ctx.arguments()];
for (let i = nodes.length - 1; i >= 0; i--) {
const node = nodes[i];
const { location } = node;
if (!location) continue;
const isInside = location.min <= pos && location.max >= pos;
if (isInside) return ctx.visitExpression(node);
const isAfter = location.max < pos;
if (isAfter) {
if (ctx.node.location && ctx.node.location.max === location.max) {
return ctx.visitExpression(node) || node;
}
return node;
}
}
return null;
})
.on('visitQuery', (ctx): ESQLProperNode | null => {
const nodes = [...ctx.commands()];
for (let i = nodes.length - 1; i >= 0; i--) {
const node = nodes[i];
const { location } = node;
if (!location) continue;
const isInside = location.min <= pos && location.max >= pos;
if (isInside) return ctx.visitCommand(node);
const isAfter = location.max < pos;
if (isAfter) return ctx.visitCommand(node) || node;
}
return null;
})
.visitQuery(ast);
};

public readonly ctx: GlobalVisitorContext<Methods, Data>;

constructor(protected readonly options: VisitorOptions<Methods, Data> = {}) {

@@ -69,17 +212,21 @@ export class Visitor<
): ReturnType<EnsureFunction<Methods[AstNodeToVisitorName<Ctx['node']>]>> {
const node = ctx.node;
if (node instanceof Array) {
this.ctx.assertMethodExists('visitQuery');
return this.ctx.methods.visitQuery!(ctx as any, input) as ReturnType<
NonNullable<Methods['visitQuery']>
>;
throw new Error(`Unsupported node type: ${typeof node}`);
} else if (node && typeof node === 'object') {
switch (node.type) {
case 'command':
case 'query': {
this.ctx.assertMethodExists('visitQuery');
return this.ctx.methods.visitQuery!(ctx as any, input) as ReturnType<
NonNullable<Methods['visitQuery']>
>;
}
case 'command': {
this.ctx.assertMethodExists('visitCommand');
return this.ctx.methods.visitCommand!(ctx as any, input) as ReturnType<
NonNullable<Methods['visitCommand']>
>;
}
}
}
throw new Error(`Unsupported node type: ${typeof node}`);

@@ -93,9 +240,12 @@ export class Visitor<
* @returns The result of the query visitor.
*/
public visitQuery(
node: ESQLAstQueryNode,
nodeOrCommands: ESQLAstQueryExpression | ESQLAstQueryExpression['commands'],
input: UndefinedToVoid<Parameters<NonNullable<Methods['visitQuery']>>[1]>
) {
const node = Array.isArray(nodeOrCommands)
? Builder.expression.query(nodeOrCommands)
: nodeOrCommands;
const queryContext = new QueryVisitorContext(this.ctx, node, null);
return this.visit(queryContext, input);
}

@@ -7,7 +7,7 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/

import { getAstAndSyntaxErrors } from '../ast_parser';
import { parse } from '../parser';
import {
ESQLColumn,
ESQLCommand,

@@ -24,10 +24,10 @@ import {
import { walk, Walker } from './walker';

test('can walk all functions', () => {
const { ast } = getAstAndSyntaxErrors('METRICS index a(b(c(foo)))');
const { root } = parse('METRICS index a(b(c(foo)))');
const functions: string[] = [];

walk(ast, {
walk(root, {
visitFunction: (fn) => functions.push(fn.name),
});

@@ -36,7 +36,7 @@ test('can walk all functions', () => {

test('can find assignment expression', () => {
const query = 'METRICS source var0 = bucket(bytes, 1 hour)';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const functions: ESQLFunction[] = [];

Walker.walk(ast, {

@@ -56,7 +56,7 @@ test('can find assignment expression', () => {
describe('structurally can walk all nodes', () => {
describe('commands', () => {
test('can visit a single source command', () => {
const { ast } = getAstAndSyntaxErrors('FROM index');
const { ast } = parse('FROM index');
const commands: ESQLCommand[] = [];

walk(ast, {

@@ -67,7 +67,7 @@ describe('structurally can walk all nodes', () => {
});

test('can visit all commands', () => {
const { ast } = getAstAndSyntaxErrors('FROM index | STATS a = 123 | WHERE 123 | LIMIT 10');
const { ast } = parse('FROM index | STATS a = 123 | WHERE 123 | LIMIT 10');
const commands: ESQLCommand[] = [];

walk(ast, {

@@ -83,7 +83,7 @@ describe('structurally can walk all nodes', () => {
});

test('"visitAny" can capture command nodes', () => {
const { ast } = getAstAndSyntaxErrors('FROM index | STATS a = 123 | WHERE 123 | LIMIT 10');
const { ast } = parse('FROM index | STATS a = 123 | WHERE 123 | LIMIT 10');
const commands: ESQLCommand[] = [];

walk(ast, {

@@ -102,7 +102,7 @@ describe('structurally can walk all nodes', () => {

describe('command options', () => {
test('can visit command options', () => {
const { ast } = getAstAndSyntaxErrors('FROM index METADATA _index');
const { ast } = parse('FROM index METADATA _index');
const options: ESQLCommandOption[] = [];

walk(ast, {

@@ -114,7 +114,7 @@ describe('structurally can walk all nodes', () => {
});

test('"visitAny" can capture an options node', () => {
const { ast } = getAstAndSyntaxErrors('FROM index METADATA _index');
const { ast } = parse('FROM index METADATA _index');
const options: ESQLCommandOption[] = [];

walk(ast, {

@@ -130,7 +130,7 @@ describe('structurally can walk all nodes', () => {

describe('command mode', () => {
test('visits "mode" nodes', () => {
const { ast } = getAstAndSyntaxErrors('FROM index | ENRICH a:b');
const { ast } = parse('FROM index | ENRICH a:b');
const modes: ESQLCommandMode[] = [];

walk(ast, {

@@ -142,7 +142,7 @@ describe('structurally can walk all nodes', () => {
});

test('"visitAny" can capture a mode node', () => {
const { ast } = getAstAndSyntaxErrors('FROM index | ENRICH a:b');
const { ast } = parse('FROM index | ENRICH a:b');
const modes: ESQLCommandMode[] = [];

walk(ast, {

@@ -159,7 +159,7 @@ describe('structurally can walk all nodes', () => {
describe('expressions', () => {
describe('sources', () => {
test('iterates through a single source', () => {
const { ast } = getAstAndSyntaxErrors('FROM index');
const { ast } = parse('FROM index');
const sources: ESQLSource[] = [];

walk(ast, {

@@ -171,7 +171,7 @@ describe('structurally can walk all nodes', () => {
});

test('"visitAny" can capture a source node', () => {
const { ast } = getAstAndSyntaxErrors('FROM index');
const { ast } = parse('FROM index');
const sources: ESQLSource[] = [];

walk(ast, {

@@ -185,7 +185,7 @@ describe('structurally can walk all nodes', () => {
});

test('iterates through all sources', () => {
const { ast } = getAstAndSyntaxErrors('METRICS index, index2, index3, index4');
const { ast } = parse('METRICS index, index2, index3, index4');
const sources: ESQLSource[] = [];

walk(ast, {

@@ -205,7 +205,7 @@ describe('structurally can walk all nodes', () => {
describe('columns', () => {
test('can walk through a single column', () => {
const query = 'ROW x = 1';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const columns: ESQLColumn[] = [];

walk(ast, {

@@ -222,7 +222,7 @@ describe('structurally can walk all nodes', () => {

test('"visitAny" can capture a column', () => {
const query = 'ROW x = 1';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const columns: ESQLColumn[] = [];

walk(ast, {

@@ -241,7 +241,7 @@ describe('structurally can walk all nodes', () => {

test('can walk through multiple columns', () => {
const query = 'FROM index | STATS a = 123, b = 456';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const columns: ESQLColumn[] = [];

walk(ast, {

@@ -264,7 +264,7 @@ describe('structurally can walk all nodes', () => {
describe('functions', () => {
test('can walk through functions', () => {
const query = 'FROM a | STATS fn(1), agg(true)';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const nodes: ESQLFunction[] = [];

walk(ast, {

@@ -285,7 +285,7 @@ describe('structurally can walk all nodes', () => {

test('"visitAny" can capture function nodes', () => {
const query = 'FROM a | STATS fn(1), agg(true)';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const nodes: ESQLFunction[] = [];

walk(ast, {

@@ -310,7 +310,7 @@ describe('structurally can walk all nodes', () => {
describe('literals', () => {
test('can walk a single literal', () => {
const query = 'ROW x = 1';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const columns: ESQLLiteral[] = [];

walk(ast, {

@@ -327,7 +327,7 @@ describe('structurally can walk all nodes', () => {

test('can walk through all literals', () => {
const query = 'FROM index | STATS a = 123, b = "foo", c = true AND false';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const columns: ESQLLiteral[] = [];

walk(ast, {

@@ -360,7 +360,7 @@ describe('structurally can walk all nodes', () => {

test('can walk through literals inside functions', () => {
const query = 'FROM index | STATS f(1, "2", g(true) + false, h(j(k(3.14))))';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const columns: ESQLLiteral[] = [];

walk(ast, {

@@ -401,7 +401,7 @@ describe('structurally can walk all nodes', () => {
describe('numeric', () => {
test('can walk a single numeric list literal', () => {
const query = 'ROW x = [1, 2]';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const lists: ESQLList[] = [];

walk(ast, {

@@ -429,7 +429,7 @@ describe('structurally can walk all nodes', () => {

test('"visitAny" can capture a list literal', () => {
const query = 'ROW x = [1, 2]';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const lists: ESQLList[] = [];

walk(ast, {

@@ -443,7 +443,7 @@ describe('structurally can walk all nodes', () => {

test('can walk plain literals inside list literal', () => {
const query = 'ROW x = [1, 2] + [3.3]';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const lists: ESQLList[] = [];
const literals: ESQLLiteral[] = [];

@@ -502,7 +502,7 @@ describe('structurally can walk all nodes', () => {
describe('boolean', () => {
test('can walk a single numeric list literal', () => {
const query = 'ROW x = [true, false]';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const lists: ESQLList[] = [];

walk(ast, {

@@ -530,7 +530,7 @@ describe('structurally can walk all nodes', () => {

test('can walk plain literals inside list literal', () => {
const query = 'ROW x = [false, false], b([true, true, true])';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const lists: ESQLList[] = [];
const literals: ESQLLiteral[] = [];

@@ -580,7 +580,7 @@ describe('structurally can walk all nodes', () => {
describe('string', () => {
test('can walk string literals', () => {
const query = 'ROW x = ["a", "b"], b(["c", "d", "e"])';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const lists: ESQLList[] = [];
const literals: ESQLLiteral[] = [];

@@ -631,7 +631,7 @@ describe('structurally can walk all nodes', () => {
describe('time interval', () => {
test('can visit time interval nodes', () => {
const query = 'FROM index | STATS a = 123 BY 1h';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const intervals: ESQLTimeInterval[] = [];

walk(ast, {

@@ -649,7 +649,7 @@ describe('structurally can walk all nodes', () => {

test('"visitAny" can capture time interval expressions', () => {
const query = 'FROM index | STATS a = 123 BY 1h';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const intervals: ESQLTimeInterval[] = [];

walk(ast, {

@@ -669,7 +669,7 @@ describe('structurally can walk all nodes', () => {

test('"visitAny" does not capture time interval node if type-specific callback provided', () => {
const query = 'FROM index | STATS a = 123 BY 1h';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const intervals1: ESQLTimeInterval[] = [];
const intervals2: ESQLTimeInterval[] = [];

@@ -688,7 +688,7 @@ describe('structurally can walk all nodes', () => {
describe('cast expression', () => {
test('can visit cast expression', () => {
const query = 'FROM index | STATS a = 123::integer';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);

const casts: ESQLInlineCast[] = [];

@@ -711,7 +711,7 @@ describe('structurally can walk all nodes', () => {

test('"visitAny" can capture cast expression', () => {
const query = 'FROM index | STATS a = 123::integer';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const casts: ESQLInlineCast[] = [];

walk(ast, {

@@ -738,7 +738,7 @@ describe('structurally can walk all nodes', () => {

describe('unknown nodes', () => {
test('can iterate through "unknown" nodes', () => {
const { ast } = getAstAndSyntaxErrors('FROM index');
const { ast } = parse('FROM index');
let source: ESQLSource | undefined;

walk(ast, {

@@ -764,7 +764,7 @@ describe('structurally can walk all nodes', () => {

describe('Walker.commands()', () => {
test('can collect all commands', () => {
const { ast } = getAstAndSyntaxErrors('FROM index | STATS a = 123 | WHERE 123 | LIMIT 10');
const { ast } = parse('FROM index | STATS a = 123 | WHERE 123 | LIMIT 10');
const commands = Walker.commands(ast);

expect(commands.map(({ name }) => name).sort()).toStrictEqual([

@@ -779,7 +779,7 @@ describe('Walker.commands()', () => {
describe('Walker.params()', () => {
test('can collect all params', () => {
const query = 'ROW x = ?';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const params = Walker.params(ast);

expect(params).toMatchObject([

@@ -794,7 +794,7 @@ describe('Walker.params()', () => {
test('can collect all params from grouping functions', () => {
const query =
'ROW x=1, time=2024-07-10 | stats z = avg(x) by bucket(time, 20, ?_tstart,?_tend)';
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const params = Walker.params(ast);

expect(params).toMatchObject([

@@ -818,7 +818,7 @@ describe('Walker.find()', () => {
test('can find a bucket() function', () => {
const query = 'FROM b | STATS var0 = bucket(bytes, 1 hour), fn(1), fn(2), agg(true)';
const fn = Walker.find(
getAstAndSyntaxErrors(query).ast!,
parse(query).ast!,
(node) => node.type === 'function' && node.name === 'bucket'
);

@@ -831,7 +831,7 @@ describe('Walker.find()', () => {
test('finds the first "fn" function', () => {
const query = 'FROM b | STATS var0 = bucket(bytes, 1 hour), fn(1), fn(2), agg(true)';
const fn = Walker.find(
getAstAndSyntaxErrors(query).ast!,
parse(query).ast!,
(node) => node.type === 'function' && node.name === 'fn'
);

@@ -852,7 +852,7 @@ describe('Walker.findAll()', () => {
test('find all "fn" functions', () => {
const query = 'FROM b | STATS var0 = bucket(bytes, 1 hour), fn(1), fn(2), agg(true)';
const list = Walker.findAll(
getAstAndSyntaxErrors(query).ast!,
parse(query).ast!,
(node) => node.type === 'function' && node.name === 'fn'
);

@@ -884,7 +884,7 @@ describe('Walker.findAll()', () => {
describe('Walker.match()', () => {
test('can find a bucket() function', () => {
const query = 'FROM b | STATS var0 = bucket(bytes, 1 hour), fn(1), fn(2), agg(true)';
const fn = Walker.match(getAstAndSyntaxErrors(query).ast!, {
const fn = Walker.match(parse(query).ast!, {
type: 'function',
name: 'bucket',
});

@@ -897,7 +897,7 @@ describe('Walker.match()', () => {

test('finds the first "fn" function', () => {
const query = 'FROM b | STATS var0 = bucket(bytes, 1 hour), fn(1), fn(2), agg(true)';
const fn = Walker.match(getAstAndSyntaxErrors(query).ast!, { type: 'function', name: 'fn' });
const fn = Walker.match(parse(query).ast!, { type: 'function', name: 'fn' });

expect(fn).toMatchObject({
type: 'function',

@@ -915,7 +915,7 @@ describe('Walker.match()', () => {
describe('Walker.matchAll()', () => {
test('find all "fn" functions', () => {
const query = 'FROM b | STATS var0 = bucket(bytes, 1 hour), fn(1), fn(2), agg(true)';
const list = Walker.matchAll(getAstAndSyntaxErrors(query).ast!, {
const list = Walker.matchAll(parse(query).ast!, {
type: 'function',
name: 'fn',
});

@@ -946,7 +946,7 @@ describe('Walker.matchAll()', () => {

test('find all "fn" and "agg" functions', () => {
const query = 'FROM b | STATS var0 = bucket(bytes, 1 hour), fn(1), fn(2), agg(true)';
const list = Walker.matchAll(getAstAndSyntaxErrors(query).ast!, {
const list = Walker.matchAll(parse(query).ast!, {
type: 'function',
name: ['fn', 'agg'],
});

@@ -981,7 +981,7 @@ describe('Walker.matchAll()', () => {

test('find all functions which start with "b" or "a"', () => {
const query = 'FROM b | STATS var0 = bucket(bytes, 1 hour), fn(1), fn(2), agg(true)';
const list = Walker.matchAll(getAstAndSyntaxErrors(query).ast!, {
const list = Walker.matchAll(parse(query).ast!, {
type: 'function',
name: /^a|b/i,
});

@@ -1003,8 +1003,8 @@ describe('Walker.hasFunction()', () => {
test('can find assignment expression', () => {
const query1 = 'FROM a | STATS bucket(bytes, 1 hour)';
const query2 = 'FROM b | STATS var0 = bucket(bytes, 1 hour)';
const has1 = Walker.hasFunction(getAstAndSyntaxErrors(query1).ast!, '=');
const has2 = Walker.hasFunction(getAstAndSyntaxErrors(query2).ast!, '=');
const has1 = Walker.hasFunction(parse(query1).ast!, '=');
const has2 = Walker.hasFunction(parse(query2).ast!, '=');

expect(has1).toBe(false);
expect(has2).toBe(true);

@@ -9,8 +9,12 @@

import type {
ESQLAstCommand,
ESQLAstComment,
ESQLAstExpression,
ESQLAstItem,
ESQLAstNode,
ESQLAstNodeFormatting,
ESQLAstQueryExpression,
ESQLColumn,
ESQLCommand,
ESQLCommandMode,

@@ -34,9 +38,11 @@ export interface WalkerOptions {
visitCommand?: (node: ESQLCommand) => void;
visitCommandOption?: (node: ESQLCommandOption) => void;
visitCommandMode?: (node: ESQLCommandMode) => void;
visitSingleAstItem?: (node: ESQLSingleAstItem) => void;
visitSource?: (node: ESQLSource) => void;
/** @todo Rename to `visitExpression`. */
visitSingleAstItem?: (node: ESQLAstExpression) => void;
visitQuery?: (node: ESQLAstQueryExpression) => void;
visitFunction?: (node: ESQLFunction) => void;
visitSource?: (node: ESQLSource) => void;
visitColumn?: (node: ESQLColumn) => void;
visitLiteral?: (node: ESQLLiteral) => void;
visitListLiteral?: (node: ESQLList) => void;

@@ -224,6 +230,58 @@ export class Walker {
return !!Walker.findFunction(node, (fn) => fn.name === name);
};

public static readonly visitComments = (
root: ESQLAstNode | ESQLAstNode[],
callback: (
comment: ESQLAstComment,
node: ESQLProperNode,
attachment: keyof ESQLAstNodeFormatting
) => void
): void => {
Walker.walk(root, {
visitAny: (node) => {
const formatting = node.formatting;
if (!formatting) return;

if (formatting.top) {
for (const decoration of formatting.top) {
if (decoration.type === 'comment') {
callback(decoration, node, 'top');
}
}
}

if (formatting.left) {
for (const decoration of formatting.left) {
if (decoration.type === 'comment') {
callback(decoration, node, 'left');
}
}
}

if (formatting.right) {
for (const decoration of formatting.right) {
if (decoration.type === 'comment') {
callback(decoration, node, 'right');
}
}
}

if (formatting.rightSingleLine) {
callback(formatting.rightSingleLine, node, 'rightSingleLine');
}

if (formatting.bottom) {
for (const decoration of formatting.bottom) {
if (decoration.type === 'comment') {
callback(decoration, node, 'bottom');
}
}
}
},
});
};

constructor(protected readonly options: WalkerOptions) {}

public walk(node: undefined | ESQLAstNode | ESQLAstNode[]): void {

@@ -288,10 +346,36 @@ export class Walker {
}
}

public walkSingleAstItem(node: ESQLSingleAstItem): void {
public walkFunction(node: ESQLFunction): void {
const { options } = this;
(options.visitFunction ?? options.visitAny)?.(node);
const args = node.args;
const length = args.length;
for (let i = 0; i < length; i++) {
const arg = args[i];
this.walkAstItem(arg);
}
}

public walkQuery(node: ESQLAstQueryExpression): void {
const { options } = this;
(options.visitQuery ?? options.visitAny)?.(node);
const commands = node.commands;
const length = commands.length;
for (let i = 0; i < length; i++) {
const arg = commands[i];
this.walkCommand(arg);
}
}

public walkSingleAstItem(node: ESQLAstExpression): void {
const { options } = this;
options.visitSingleAstItem?.(node);
switch (node.type) {
case 'query': {
this.walkQuery(node as ESQLAstQueryExpression);
break;
}
case 'function': {
this.walkFunction(node as ESQLFunction);
break;

@@ -313,7 +397,7 @@ export class Walker {
break;
}
case 'literal': {
options.visitLiteral?.(node);
(options.visitLiteral ?? options.visitAny)?.(node);
break;
}
case 'list': {

@@ -334,17 +418,6 @@ export class Walker {
}
}
}

public walkFunction(node: ESQLFunction): void {
const { options } = this;
(options.visitFunction ?? options.visitAny)?.(node);
const args = node.args;
const length = args.length;
for (let i = 0; i < length; i++) {
const arg = args[i];
this.walkAstItem(arg);
}
}
}

export const walk = Walker.walk;

@@ -6,7 +6,7 @@
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import { getAstAndSyntaxErrors, Walker, walk, BasicPrettyPrinter } from '@kbn/esql-ast';
import { parse, Walker, walk, BasicPrettyPrinter } from '@kbn/esql-ast';

import type {
ESQLSource,

@@ -20,7 +20,7 @@ const DEFAULT_ESQL_LIMIT = 1000;

// retrieves the index pattern from the aggregate query for ES|QL using ast parsing
export function getIndexPatternFromESQLQuery(esql?: string) {
const { ast } = getAstAndSyntaxErrors(esql);
const { ast } = parse(esql);
const sourceCommand = ast.find(({ name }) => ['from', 'metrics'].includes(name));
const args = (sourceCommand?.args ?? []) as ESQLSource[];
const indices = args.filter((arg) => arg.sourceType === 'index');

@@ -31,7 +31,7 @@ export function getIndexPatternFromESQLQuery(esql?: string) {
// The metrics command too but only if it aggregates
export function hasTransformationalCommand(esql?: string) {
const transformationalCommands = ['stats', 'keep'];
const { ast } = getAstAndSyntaxErrors(esql);
const { ast } = parse(esql);
const hasAtLeastOneTransformationalCommand = transformationalCommands.some((command) =>
ast.find(({ name }) => name === command)
);

@@ -48,7 +48,7 @@ export function hasTransformationalCommand(esql?: string) {
}

export function getLimitFromESQLQuery(esql: string): number {
const { ast } = getAstAndSyntaxErrors(esql);
const { ast } = parse(esql);
const limitCommands = ast.filter(({ name }) => name === 'limit');
if (!limitCommands || !limitCommands.length) {
return DEFAULT_ESQL_LIMIT;

@@ -82,7 +82,7 @@ export function removeDropCommandsFromESQLQuery(esql?: string): string {
* @returns string
*/
export const getTimeFieldFromESQLQuery = (esql: string) => {
const { ast } = getAstAndSyntaxErrors(esql);
const { ast } = parse(esql);
const functions: ESQLFunction[] = [];

walk(ast, {

@@ -115,19 +115,19 @@ export const getTimeFieldFromESQLQuery = (esql: string) => {
};

export const isQueryWrappedByPipes = (query: string): boolean => {
const { ast } = getAstAndSyntaxErrors(query);
const { ast } = parse(query);
const numberOfCommands = ast.length;
const pipesWithNewLine = query.split('\n |');
return numberOfCommands === pipesWithNewLine?.length;
};

export const prettifyQuery = (query: string, isWrapped: boolean): string => {
const { ast } = getAstAndSyntaxErrors(query);
return BasicPrettyPrinter.print(ast, { multiline: !isWrapped });
const { root } = parse(query);
return BasicPrettyPrinter.print(root, { multiline: !isWrapped });
};

export const retrieveMetadataColumns = (esql: string): string[] => {
const { ast } = getAstAndSyntaxErrors(esql);
const { ast } = parse(esql);
const options: ESQLCommandOption[] = [];

walk(ast, {

@@ -7,14 +7,7 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/

import { CharStreams } from 'antlr4';
import {
getAstAndSyntaxErrors,
getParser,
ROOT_STATEMENT,
ESQLErrorListener,
type EditorError,
} from '@kbn/esql-ast';
import { parse, parseErrors, type EditorError } from '@kbn/esql-ast';
import type { monaco } from '../../monaco_imports';
import type { BaseWorkerDefinition } from '../../types';

@@ -38,33 +31,21 @@ export class ESQLWorker implements BaseWorkerDefinition {
this._ctx = ctx;
}

private getModelCharStream(modelUri: string) {
public async getSyntaxErrors(modelUri: string) {
const model = this._ctx.getMirrorModels().find((m) => m.uri.toString() === modelUri);
const text = model?.getValue();

if (text) {
return CharStreams.fromString(text);
}
}
if (!text) return [];

public async getSyntaxErrors(modelUri: string) {
const inputStream = this.getModelCharStream(modelUri);
const errors = parseErrors(text);

if (inputStream) {
const errorListener = new ESQLErrorListener();
const parser = getParser(inputStream, errorListener);

parser[ROOT_STATEMENT]();

return errorListener.getErrors().map(inlineToMonacoErrors);
}
return [];
return errors.map(inlineToMonacoErrors);
}

getAst(text: string | undefined) {
const rawAst = getAstAndSyntaxErrors(text);
const rawAst = parse(text);
return {
ast: rawAst.ast,
ast: rawAst.root.commands,
errors: rawAst.errors.map(inlineToMonacoErrors),
};
}

@@ -29,6 +29,7 @@ export const storybookAliases = {
data: 'src/plugins/data/.storybook',
discover: 'src/plugins/discover/.storybook',
embeddable: 'src/plugins/embeddable/.storybook',
esql_ast_inspector: 'examples/esql_ast_inspector/.storybook',
es_ui_shared: 'src/plugins/es_ui_shared/.storybook',
expandable_flyout: 'packages/kbn-expandable-flyout/.storybook',
expression_error: 'src/plugins/expression_error/.storybook',
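
For reference, a minimal sketch of the `parse`-based API that the diff above migrates callers to. This is an illustration, not code from the PR: the exports (`parse`, `Walker`, `BasicPrettyPrinter` from `@kbn/esql-ast`) and the shape of the parse result (`root`, `ast`, `errors`) are taken from the changed lines above, while the query string and variable names are made up for the example.

```ts
import { parse, Walker, BasicPrettyPrinter } from '@kbn/esql-ast';

// parse() supersedes getAstAndSyntaxErrors(): it returns the query root node,
// the command list (`ast`), and any syntax errors collected while parsing.
const { root, ast, errors } = parse('FROM index | STATS avg(bytes) BY agent.name | LIMIT 10');

// The Walker API is unchanged: collect the names of all function nodes.
const functions: string[] = [];
Walker.walk(ast, {
  visitFunction: (fn) => functions.push(fn.name),
});

// The pretty-printer now accepts the root query node.
const text = BasicPrettyPrinter.print(root, { multiline: true });

console.log({ functions, errorCount: errors.length, text });
```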