mirror of
https://github.com/elastic/kibana.git
Migrate to official antlr4 package (#177211)
## Summary

This PR migrates Kibana away from antlr4ts to the official antlr4 package, so that the grammar used in Kibana (for example for ES|QL) is on par with ES.

---------

Co-authored-by: dej611 <dej611@gmail.com>
This commit is contained in:
parent
26b6217764
commit
cbcbb938fc
41 changed files with 6921 additions and 7588 deletions
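For orientation (not part of the diff itself), a minimal sketch of the import-level change the migration implies, based on the modules touched below. `CharStreams.fromString` and the relative paths are assumptions for illustration only:

```ts
// Before: antlr4ts exposed the generated classes as named exports.
// import { CharStreams, CommonTokenStream } from 'antlr4ts';
// import { esql_lexer as ESQLLexer } from './antlr/esql_lexer';

// After: the official antlr4 runtime, with default-exported generated classes.
import { CharStreams, CommonTokenStream } from 'antlr4';
import { default as ESQLLexer } from './antlr/esql_lexer';
import { default as ESQLParser } from './antlr/esql_parser';

const input = CharStreams.fromString('from index | limit 10');
const tokens = new CommonTokenStream(new ESQLLexer(input));
const parser = new ESQLParser(tokens);
```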
@@ -905,7 +905,7 @@
     "adm-zip": "^0.5.9",
     "ajv": "^8.12.0",
     "ansi-regex": "^6.0.1",
-    "antlr4ts": "^0.5.0-alpha.3",
+    "antlr4": "^4.13.1-patch-1",
     "archiver": "^5.3.1",
     "async": "^3.2.3",
     "aws4": "^1.12.0",
@@ -1512,7 +1512,6 @@
     "@yarnpkg/lockfile": "^1.1.0",
     "abab": "^2.0.4",
     "aggregate-error": "^3.1.0",
-    "antlr4ts-cli": "^0.5.0-alpha.3",
     "apidoc-markdown": "^7.3.0",
     "argsplit": "^1.0.5",
     "autoprefixer": "^10.4.7",
@@ -26,7 +26,7 @@ SRCS = glob(
 SHARED_DEPS = [
     "//packages/kbn-i18n",
     "//packages/kbn-ui-theme",
-    "@npm//antlr4ts",
+    "@npm//antlr4",
     "@npm//monaco-editor",
     "@npm//monaco-yaml",
     "@npm//js-levenshtein",
@@ -4,8 +4,9 @@
   "private": true,
   "license": "SSPL-1.0 OR Elastic License 2.0",
   "scripts": {
-    "build:antlr4ts:painless": "../../node_modules/antlr4ts-cli/antlr4ts ./src/painless/antlr/painless_lexer.g4 ./src/painless/antlr/painless_parser.g4 && node ./scripts/fix_generated_antlr.js painless",
-    "build:antlr4ts:esql": "../../node_modules/antlr4ts-cli/antlr4ts src/esql/antlr/esql_lexer.g4 src/esql/antlr/esql_parser.g4 && node ./scripts/fix_generated_antlr.js esql && node ./scripts/esql_update_ast_script.js",
-    "build:antlr4ts": "npm run build:antlr4ts:painless && npm run build:antlr4ts:esql"
+    "build:antlr4:painless": "antlr -Dlanguage=TypeScript ./src/painless/antlr/painless_lexer.g4 ./src/painless/antlr/painless_parser.g4 && node ./scripts/fix_generated_antlr.js painless",
+    "build:antlr4:esql": "antlr -Dlanguage=TypeScript src/esql/antlr/esql_lexer.g4 src/esql/antlr/esql_parser.g4 && node ./scripts/fix_generated_antlr.js esql && node ./scripts/esql_update_ast_script.js",
+    "prebuild:antlr4": "brew bundle --file=./scripts/antlr4_tools/brewfile",
+    "build:antlr4": "yarn run build:antlr4:painless && npm run build:antlr4:esql"
   }
 }
packages/kbn-monaco/scripts/antlr4_tools/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
+brewfile.lock.json

packages/kbn-monaco/scripts/antlr4_tools/README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
+## antlr4-tools
+
+Defines a fairly quick way to get [antlr](https://github.com/antlr/antlr4) set up on any machine with brew support: simply run `brew bundle` within this directory. After running that command, `antlr` should become available in your path as a valid executable.

packages/kbn-monaco/scripts/antlr4_tools/brewfile (new file, 2 lines)
@@ -0,0 +1,2 @@
+brew "openjdk"
+brew "antlr"
@@ -13,11 +13,11 @@ const { readdirSync, readFileSync, writeFileSync } = require('fs');
 const ora = require('ora');
 const log = ora('Updating ES|QL AST walker from antlr grammar').start();
 /*
- * This script will read from the parser file generated by the "build:antlr4ts:esql" task
+ * This script will read from the parser file generated by the "build:antlr4:esql" task
  * and extract all quoted/unquoted tokens to update their ids
  * into the "ast_helper" file.
  * This prevents the bundle size to increase by ~500 kb ("esql_parser" size).
- * This script is run at the end of "build:antlr4ts:esql" task, so no need to call it manually.
+ * This script is run at the end of "build:antlr4:esql" task, so no need to call it manually.
  */
 async function execute(folder) {
   const generatedAntlrFolder = join(__dirname, '..', 'src', folder, 'antlr');
@ -60,7 +60,7 @@ function getQuotedText(ctx: ParserRuleContext) {
|
|||
return [
|
||||
${quotedList.map(({ name, value }) => `${value} /* esql_parser.${name} */`).join(', ')}
|
||||
]
|
||||
.map((keyCode) => ctx.tryGetToken(keyCode, 0))
|
||||
.map((keyCode) => ctx.getToken(keyCode, 0))
|
||||
.filter(nonNullable)[0];
|
||||
}
|
||||
|
||||
|
@ -69,7 +69,7 @@ function getUnquotedText(ctx: ParserRuleContext) {
|
|||
${unquotedList.map(({ name, value }) => `${value} /* esql_parser.${name} */`).join(', ')}
|
||||
|
||||
]
|
||||
.map((keyCode) => ctx.tryGetToken(keyCode, 0))
|
||||
.map((keyCode) => ctx.getToken(keyCode, 0))
|
||||
.filter(nonNullable)[0];
|
||||
}
|
||||
/* SCRIPT_MARKER_END */
|
||||
|
|
|
@ -32,7 +32,9 @@ function execute(folder) {
|
|||
.toString()
|
||||
.split('\n');
|
||||
|
||||
fileContentRows.unshift('// @ts-nocheck');
|
||||
if (!/\@ts-nocheck/.test(fileContentRows[0])) {
|
||||
fileContentRows.unshift('// @ts-nocheck');
|
||||
}
|
||||
|
||||
const filePath = join(generatedAntlrFolder, file);
|
||||
const fileContent = fileContentRows.join('\n');
|
||||
|
@ -44,7 +46,7 @@ function execute(folder) {
|
|||
});
|
||||
|
||||
// Rename generated parserListener file to snakecase to satisfy file casing check
|
||||
// There doesn't appear to be a way to fix this OOTB with antlr4ts-cli
|
||||
// There doesn't appear to be a way to fix this OOTB with antlr4
|
||||
try {
|
||||
renameSync(
|
||||
join(generatedAntlrFolder, `${folder}_parserListener.ts`),
|
||||
|
@@ -6,18 +6,20 @@
  * Side Public License, v 1.
  */

-import { ANTLRErrorListener, Recognizer } from 'antlr4ts';
+import type { Recognizer, RecognitionException } from 'antlr4';
+import { ErrorListener } from 'antlr4';
 import type { EditorError } from '../types';

-export class ANTLREErrorListener implements ANTLRErrorListener<any> {
-  private errors: EditorError[] = [];
+export class ANTLRErrorListener extends ErrorListener<any> {
+  protected errors: EditorError[] = [];

   syntaxError(
-    recognizer: Recognizer<any, any>,
+    recognizer: Recognizer<any>,
     offendingSymbol: any,
     line: number,
     column: number,
-    message: string
+    message: string,
+    error: RecognitionException | undefined
   ): void {
     let endColumn = column + 1;
|
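Below, a minimal sketch (not from the diff) of a listener that collects errors using the antlr4 `ErrorListener` base class shown above; the shape mirrors the `syntaxError` signature in this file:

```ts
import { ErrorListener, type Recognizer, type RecognitionException } from 'antlr4';

// Collects syntax errors instead of printing them, mirroring the class above.
class CollectingErrorListener extends ErrorListener<any> {
  readonly errors: Array<{ line: number; column: number; message: string }> = [];

  syntaxError(
    _recognizer: Recognizer<any>,
    _offendingSymbol: any,
    line: number,
    column: number,
    message: string,
    _error: RecognitionException | undefined
  ): void {
    this.errors.push({ line, column, message });
  }
}
```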
(Diffs for several files suppressed because they are too large or contain lines that are too long.)
|
@ -13,7 +13,6 @@ import { ESQL_LANG_ID } from './lib/constants';
|
|||
import type { CustomLangModuleType } from '../types';
|
||||
import type { ESQLWorker } from './worker/esql_worker';
|
||||
|
||||
import { DiagnosticsAdapter } from '../common/diagnostics_adapter';
|
||||
import { WorkerProxyService } from '../common/worker_proxy';
|
||||
import type { ESQLCallbacks } from './lib/ast/shared/types';
|
||||
import { ESQLAstAdapter } from './lib/monaco/esql_ast_provider';
|
||||
|
@ -28,10 +27,6 @@ export const ESQLLang: CustomLangModuleType<ESQLCallbacks> = {
|
|||
workerProxyService.setup(ESQL_LANG_ID);
|
||||
|
||||
monaco.languages.setTokensProvider(ESQL_LANG_ID, new ESQLTokensProvider());
|
||||
|
||||
// handle syntax errors via the diagnostic adapter
|
||||
// but then enrich them via the separate validate function
|
||||
new DiagnosticsAdapter(ESQL_LANG_ID, (...uris) => workerProxyService.getWorker(uris));
|
||||
},
|
||||
languageConfiguration: {
|
||||
brackets: [
|
||||
|
|
@@ -6,17 +6,17 @@
  * Side Public License, v 1.
  */

-import { CommonTokenStream, type CodePointCharStream, type ANTLRErrorListener } from 'antlr4ts';
+import { CommonTokenStream, type CharStream, type ErrorListener } from 'antlr4';

-import { esql_lexer as ESQLLexer } from '../antlr/esql_lexer';
-import { esql_parser as ESQLParser } from '../antlr/esql_parser';
-import type { esql_parserListener as ESQLParserListener } from '../antlr/esql_parser_listener';
+import { default as ESQLLexer } from '../antlr/esql_lexer';
+import { default as ESQLParser } from '../antlr/esql_parser';
+import { default as ESQLParserListener } from '../antlr/esql_parser_listener';

 export const ROOT_STATEMENT = 'singleStatement';

 export const getParser = (
-  inputStream: CodePointCharStream,
-  errorListener: ANTLRErrorListener<any>,
+  inputStream: CharStream,
+  errorListener: ErrorListener<any>,
   parseListener?: ESQLParserListener
 ) => {
   const lexer = getLexer(inputStream, errorListener);

@@ -27,16 +27,15 @@ export const getParser = (
   parser.addErrorListener(errorListener);

   if (parseListener) {
+    // @ts-expect-error the addParseListener API does exist and is documented here
+    // https://github.com/antlr/antlr4/blob/dev/doc/listeners.md
     parser.addParseListener(parseListener);
   }

   return parser;
 };

-export const getLexer = (
-  inputStream: CodePointCharStream,
-  errorListener: ANTLRErrorListener<any>
-) => {
+export const getLexer = (inputStream: CharStream, errorListener: ErrorListener<any>) => {
   const lexer = new ESQLLexer(inputStream);

   lexer.removeErrorListeners();
|
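A short usage sketch for the updated facade (it mirrors the spec files later in this diff; import paths and the way the entry rule is invoked are assumptions):

```ts
import { CharStreams } from 'antlr4';
import { getParser, ROOT_STATEMENT } from './antlr_facade'; // path assumed
import { ESQLErrorListener } from './monaco/esql_error_listener'; // path assumed

const errorListener = new ESQLErrorListener();
const inputStream = CharStreams.fromString('from index | limit 10');
const parser = getParser(inputStream, errorListener);

// ROOT_STATEMENT names the grammar's entry rule ('singleStatement');
// calling the generated rule method of that name parses the query.
parser[ROOT_STATEMENT]();
```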
|
@ -6,45 +6,15 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { RecognitionException } from 'antlr4ts';
|
||||
import { esql_parser } from '../../antlr/esql_parser';
|
||||
import type { RecognitionException } from 'antlr4';
|
||||
import { getPosition } from './ast_position_utils';
|
||||
|
||||
function getExpectedSymbols(expectedTokens: RecognitionException['expectedTokens']) {
|
||||
const tokenIds = expectedTokens?.toIntegerList().toArray() || [];
|
||||
const list = [];
|
||||
for (const tokenId of tokenIds) {
|
||||
if (esql_parser.VOCABULARY.getSymbolicName(tokenId)) {
|
||||
const symbol = esql_parser.VOCABULARY.getSymbolicName(tokenId);
|
||||
list.push(symbol === 'EOF' ? `<${symbol}>` : symbol);
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
export function createError(exception: RecognitionException) {
|
||||
const token = exception.getOffendingToken();
|
||||
if (token) {
|
||||
const expectedSymbols = getExpectedSymbols(exception.expectedTokens);
|
||||
if (
|
||||
['ASTERISK', 'UNQUOTED_IDENTIFIER', 'QUOTED_IDENTIFIER'].every(
|
||||
(s, i) => expectedSymbols[i] === s
|
||||
)
|
||||
) {
|
||||
return {
|
||||
type: 'error' as const,
|
||||
text: `Unknown column ${token.text}`,
|
||||
location: getPosition(token),
|
||||
};
|
||||
}
|
||||
}
|
||||
const token = exception.offendingToken;
|
||||
|
||||
return {
|
||||
type: 'error' as const,
|
||||
text: token
|
||||
? `SyntaxError: expected {${getExpectedSymbols(exception.expectedTokens).join(
|
||||
', '
|
||||
)}} but found "${token.text}"`
|
||||
: exception.message,
|
||||
text: `SyntaxError: ${exception.message}`,
|
||||
location: getPosition(token),
|
||||
};
|
||||
}
|
||||
|
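As a hedged illustration of what the simplified createError above produces for a recognition exception (token offsets and message invented for illustration):

```ts
// Assuming an exception whose offendingToken covers offsets 13..17 and whose
// message is "mismatched input 'limot' expecting INTEGER_LITERAL",
// the helper above would return roughly:
const editorError = {
  type: 'error' as const,
  text: "SyntaxError: mismatched input 'limot' expecting INTEGER_LITERAL",
  location: { min: 13, max: 18 }, // from getPosition(token): max = stop + 1
};
```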
|
|
@ -6,6 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { ErrorNode, ParserRuleContext, TerminalNode } from 'antlr4';
|
||||
import {
|
||||
type ShowInfoContext,
|
||||
type ShowFunctionsContext,
|
||||
|
@ -25,9 +26,9 @@ import {
|
|||
type ShowCommandContext,
|
||||
type EnrichCommandContext,
|
||||
type WhereCommandContext,
|
||||
esql_parser,
|
||||
default as esql_parser,
|
||||
} from '../../antlr/esql_parser';
|
||||
import { esql_parserListener as ESQLParserListener } from '../../antlr/esql_parser_listener';
|
||||
import { default as ESQLParserListener } from '../../antlr/esql_parser_listener';
|
||||
import { createCommand, createFunction, createOption, createLiteral } from './ast_helpers';
|
||||
import { getPosition } from './ast_position_utils';
|
||||
import {
|
||||
|
@ -62,7 +63,7 @@ export class AstListener implements ESQLParserListener {
|
|||
const commandAst = createCommand('show', ctx);
|
||||
|
||||
this.ast.push(commandAst);
|
||||
commandAst.text = ctx.text;
|
||||
commandAst.text = ctx.getText();
|
||||
commandAst?.args.push(createFunction('info', ctx, getPosition(ctx.INFO().symbol)));
|
||||
}
|
||||
|
||||
|
@ -75,7 +76,7 @@ export class AstListener implements ESQLParserListener {
|
|||
const commandAst = createCommand('show', ctx);
|
||||
this.ast.push(commandAst);
|
||||
// update the text
|
||||
commandAst.text = ctx.text;
|
||||
commandAst.text = ctx.getText();
|
||||
commandAst?.args.push(createFunction('functions', ctx, getPosition(ctx.FUNCTIONS().symbol)));
|
||||
}
|
||||
|
||||
|
@ -119,7 +120,10 @@ export class AstListener implements ESQLParserListener {
|
|||
const metadataContent =
|
||||
metadataContext?.deprecated_metadata()?.metadataOption() || metadataContext?.metadataOption();
|
||||
if (metadataContent) {
|
||||
const option = createOption(metadataContent.METADATA().text.toLowerCase(), metadataContent);
|
||||
const option = createOption(
|
||||
metadataContent.METADATA().getText().toLowerCase(),
|
||||
metadataContent
|
||||
);
|
||||
commandAst.args.push(option);
|
||||
option.args.push(...collectAllColumnIdentifiers(metadataContent));
|
||||
}
|
||||
|
@ -142,13 +146,13 @@ export class AstListener implements ESQLParserListener {
|
|||
exitStatsCommand(ctx: StatsCommandContext) {
|
||||
const command = createCommand('stats', ctx);
|
||||
this.ast.push(command);
|
||||
const fields = ctx.fields();
|
||||
|
||||
// STATS expression is optional
|
||||
if (ctx._stats) {
|
||||
command.args.push(...collectAllFieldsStatements(fields[0]));
|
||||
command.args.push(...collectAllFieldsStatements(ctx.fields(0)));
|
||||
}
|
||||
if (ctx._grouping) {
|
||||
command.args.push(...visitByOption(ctx, ctx._stats ? fields[1] : fields[0]));
|
||||
command.args.push(...visitByOption(ctx, ctx._stats ? ctx.fields(1) : ctx.fields(0)));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -159,7 +163,7 @@ export class AstListener implements ESQLParserListener {
|
|||
exitLimitCommand(ctx: LimitCommandContext) {
|
||||
const command = createCommand('limit', ctx);
|
||||
this.ast.push(command);
|
||||
if (ctx.tryGetToken(esql_parser.INTEGER_LITERAL, 0)) {
|
||||
if (ctx.getToken(esql_parser.INTEGER_LITERAL, 0)) {
|
||||
const literal = createLiteral('number', ctx.INTEGER_LITERAL());
|
||||
if (literal) {
|
||||
command.args.push(literal);
|
||||
|
@ -174,7 +178,7 @@ export class AstListener implements ESQLParserListener {
|
|||
exitSortCommand(ctx: SortCommandContext) {
|
||||
const command = createCommand('sort', ctx);
|
||||
this.ast.push(command);
|
||||
command.args.push(...visitOrderExpression(ctx.orderExpression()));
|
||||
command.args.push(...visitOrderExpression(ctx.orderExpression_list()));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -204,7 +208,7 @@ export class AstListener implements ESQLParserListener {
|
|||
exitRenameCommand(ctx: RenameCommandContext) {
|
||||
const command = createCommand('rename', ctx);
|
||||
this.ast.push(command);
|
||||
command.args.push(...visitRenameClauses(ctx.renameClause()));
|
||||
command.args.push(...visitRenameClauses(ctx.renameClause_list()));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -254,4 +258,20 @@ export class AstListener implements ESQLParserListener {
|
|||
this.ast.push(command);
|
||||
command.args.push(...getPolicyName(ctx), ...getMatchField(ctx), ...getEnrichClauses(ctx));
|
||||
}
|
||||
|
||||
enterEveryRule(ctx: ParserRuleContext): void {
|
||||
// method not implemented, added to satisfy interface expectation
|
||||
}
|
||||
|
||||
visitErrorNode(node: ErrorNode): void {
|
||||
// method not implemented, added to satisfy interface expectation
|
||||
}
|
||||
|
||||
visitTerminal(node: TerminalNode): void {
|
||||
// method not implemented, added to satisfy interface expectation
|
||||
}
|
||||
|
||||
exitEveryRule(ctx: ParserRuleContext): void {
|
||||
// method not implemented, added to satisfy interface expectation
|
||||
}
|
||||
}
|
||||
|
|
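The four empty methods above exist because, with the official antlr4 runtime, the generated listener is also expected to provide the generic ParseTreeListener callbacks; a rough sketch of that expectation (assumed, not copied from the generated file):

```ts
import type { ErrorNode, ParserRuleContext, TerminalNode } from 'antlr4';

// Generic callbacks declared by the antlr4 ParseTreeListener contract, which is
// why AstListener now stubs them in addition to its per-rule enter/exit methods.
export interface GenericParseTreeCallbacks {
  enterEveryRule(ctx: ParserRuleContext): void;
  exitEveryRule(ctx: ParserRuleContext): void;
  visitTerminal(node: TerminalNode): void;
  visitErrorNode(node: ErrorNode): void;
}
```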
|
@ -10,10 +10,7 @@
|
|||
* In case of changes in the grammar, this script should be updated: esql_update_ast_script.js
|
||||
*/
|
||||
|
||||
import { Token } from 'antlr4ts';
|
||||
import type { ParserRuleContext } from 'antlr4ts/ParserRuleContext';
|
||||
import { ErrorNode } from 'antlr4ts/tree/ErrorNode';
|
||||
import type { TerminalNode } from 'antlr4ts/tree/TerminalNode';
|
||||
import { type Token, type ParserRuleContext, type TerminalNode } from 'antlr4';
|
||||
import type {
|
||||
ArithmeticUnaryContext,
|
||||
DecimalValueContext,
|
||||
|
@ -44,7 +41,7 @@ export function createCommand(name: string, ctx: ParserRuleContext): ESQLCommand
|
|||
return {
|
||||
type: 'command',
|
||||
name,
|
||||
text: ctx.text,
|
||||
text: ctx.getText(),
|
||||
args: [],
|
||||
location: getPosition(ctx.start, ctx.stop),
|
||||
incomplete: Boolean(ctx.exception),
|
||||
|
@ -54,16 +51,16 @@ export function createCommand(name: string, ctx: ParserRuleContext): ESQLCommand
|
|||
export function createList(ctx: ParserRuleContext, values: ESQLLiteral[]): ESQLList {
|
||||
return {
|
||||
type: 'list',
|
||||
name: ctx.text,
|
||||
name: ctx.getText(),
|
||||
values,
|
||||
text: ctx.text,
|
||||
text: ctx.getText(),
|
||||
location: getPosition(ctx.start, ctx.stop),
|
||||
incomplete: Boolean(ctx.exception),
|
||||
};
|
||||
}
|
||||
|
||||
export function createNumericLiteral(ctx: DecimalValueContext | IntegerValueContext): ESQLLiteral {
|
||||
const text = ctx.text;
|
||||
const text = ctx.getText();
|
||||
return {
|
||||
type: 'literal',
|
||||
literalType: 'number',
|
||||
|
@ -79,8 +76,8 @@ export function createFakeMultiplyLiteral(ctx: ArithmeticUnaryContext): ESQLLite
|
|||
return {
|
||||
type: 'literal',
|
||||
literalType: 'number',
|
||||
text: ctx.text,
|
||||
name: ctx.text,
|
||||
text: ctx.getText(),
|
||||
name: ctx.getText(),
|
||||
value: ctx.PLUS() ? 1 : -1,
|
||||
location: getPosition(ctx.start, ctx.stop),
|
||||
incomplete: Boolean(ctx.exception),
|
||||
|
@ -115,7 +112,7 @@ export function createLiteral(
|
|||
if (!node) {
|
||||
return;
|
||||
}
|
||||
const text = node.text;
|
||||
const text = node.getText();
|
||||
return {
|
||||
type: 'literal',
|
||||
literalType: type,
|
||||
|
@ -123,18 +120,20 @@ export function createLiteral(
|
|||
name: text,
|
||||
value: type === 'number' ? Number(text) : text,
|
||||
location: getPosition(node.symbol),
|
||||
incomplete: isMissingText(node.text),
|
||||
incomplete: isMissingText(node.getText()),
|
||||
};
|
||||
}
|
||||
|
||||
export function createTimeUnit(ctx: QualifiedIntegerLiteralContext): ESQLTimeInterval {
|
||||
return {
|
||||
type: 'timeInterval',
|
||||
quantity: Number(ctx.integerValue().text),
|
||||
unit: ctx.UNQUOTED_IDENTIFIER().text,
|
||||
text: ctx.text,
|
||||
quantity: Number(ctx.integerValue().INTEGER_LITERAL().getText()),
|
||||
unit: ctx.UNQUOTED_IDENTIFIER().symbol.text,
|
||||
text: ctx.getText(),
|
||||
location: getPosition(ctx.start, ctx.stop),
|
||||
name: `${ctx.integerValue().text} ${ctx.UNQUOTED_IDENTIFIER().text}`,
|
||||
name: `${ctx.integerValue().INTEGER_LITERAL().getText()} ${
|
||||
ctx.UNQUOTED_IDENTIFIER().symbol.text
|
||||
}`,
|
||||
incomplete: Boolean(ctx.exception),
|
||||
};
|
||||
}
|
||||
|
@ -147,7 +146,7 @@ export function createFunction(
|
|||
return {
|
||||
type: 'function',
|
||||
name,
|
||||
text: ctx.text,
|
||||
text: ctx.getText(),
|
||||
location: customPosition ?? getPosition(ctx.start, ctx.stop),
|
||||
args: [],
|
||||
incomplete: Boolean(ctx.exception),
|
||||
|
@ -203,7 +202,7 @@ export function computeLocationExtends(fn: ESQLFunction) {
|
|||
/* SCRIPT_MARKER_START */
|
||||
function getQuotedText(ctx: ParserRuleContext) {
|
||||
return [66 /* esql_parser.QUOTED_IDENTIFIER */]
|
||||
.map((keyCode) => ctx.tryGetToken(keyCode, 0))
|
||||
.map((keyCode) => ctx.getToken(keyCode, 0))
|
||||
.filter(nonNullable)[0];
|
||||
}
|
||||
|
||||
|
@ -212,7 +211,7 @@ function getUnquotedText(ctx: ParserRuleContext) {
|
|||
65 /* esql_parser.UNQUOTED_IDENTIFIER */, 71 /* esql_parser.FROM_UNQUOTED_IDENTIFIER */,
|
||||
105 /* esql_parser.UNQUOTED_ID_PATTERN */,
|
||||
]
|
||||
.map((keyCode) => ctx.tryGetToken(keyCode, 0))
|
||||
.map((keyCode) => ctx.getToken(keyCode, 0))
|
||||
.filter(nonNullable)[0];
|
||||
}
|
||||
/* SCRIPT_MARKER_END */
|
||||
|
@ -230,11 +229,11 @@ function safeBackticksRemoval(text: string | undefined) {
|
|||
return text?.replace(TICKS_REGEX, '').replace(DOUBLE_TICKS_REGEX, SINGLE_BACKTICK) || '';
|
||||
}
|
||||
|
||||
export function sanifyIdentifierString(ctx: ParserRuleContext) {
|
||||
export function sanitizeIdentifierString(ctx: ParserRuleContext) {
|
||||
return (
|
||||
getUnquotedText(ctx)?.text ||
|
||||
safeBackticksRemoval(getQuotedText(ctx)?.text) ||
|
||||
safeBackticksRemoval(ctx.text) // for some reason some quoted text is not detected correctly by the parser
|
||||
getUnquotedText(ctx)?.getText() ||
|
||||
safeBackticksRemoval(getQuotedText(ctx)?.getText()) ||
|
||||
safeBackticksRemoval(ctx.getText()) // for some reason some quoted text is not detected correctly by the parser
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -247,7 +246,7 @@ export function createSetting(policyName: Token, mode: string): ESQLCommandMode
|
|||
type: 'mode',
|
||||
name: mode.replace('_', '').toLowerCase(),
|
||||
text: mode,
|
||||
location: getPosition(policyName, { stopIndex: policyName.startIndex + mode.length - 1 }), // unfortunately this is the only location we have
|
||||
location: getPosition(policyName, { stop: policyName.start + mode.length - 1 }), // unfortunately this is the only location we have
|
||||
incomplete: false,
|
||||
};
|
||||
}
|
||||
|
@ -263,8 +262,8 @@ export function createPolicy(token: Token, policy: string): ESQLSource {
|
|||
text: policy,
|
||||
sourceType: 'policy',
|
||||
location: getPosition({
|
||||
startIndex: token.stopIndex - policy.length + 1,
|
||||
stopIndex: token.stopIndex,
|
||||
start: token.stop - policy.length + 1,
|
||||
stop: token.stop,
|
||||
}), // take into account ccq modes
|
||||
incomplete: false,
|
||||
};
|
||||
|
@ -274,7 +273,7 @@ export function createSource(
|
|||
ctx: ParserRuleContext,
|
||||
type: 'index' | 'policy' = 'index'
|
||||
): ESQLSource {
|
||||
const text = sanifyIdentifierString(ctx);
|
||||
const text = sanitizeIdentifierString(ctx);
|
||||
return {
|
||||
type: 'source',
|
||||
name: text,
|
||||
|
@ -288,21 +287,21 @@ export function createSource(
|
|||
export function createColumnStar(ctx: TerminalNode): ESQLColumn {
|
||||
return {
|
||||
type: 'column',
|
||||
name: ctx.text,
|
||||
text: ctx.text,
|
||||
name: ctx.getText(),
|
||||
text: ctx.getText(),
|
||||
location: getPosition(ctx.symbol),
|
||||
incomplete: ctx.text === '',
|
||||
incomplete: ctx.getText() === '',
|
||||
quoted: false,
|
||||
};
|
||||
}
|
||||
|
||||
export function createColumn(ctx: ParserRuleContext): ESQLColumn {
|
||||
const text = sanifyIdentifierString(ctx);
|
||||
const hasQuotes = Boolean(getQuotedText(ctx) || isQuoted(ctx.text));
|
||||
const text = sanitizeIdentifierString(ctx);
|
||||
const hasQuotes = Boolean(getQuotedText(ctx) || isQuoted(ctx.getText()));
|
||||
return {
|
||||
type: 'column' as const,
|
||||
name: text,
|
||||
text: ctx.text,
|
||||
text: ctx.getText(),
|
||||
location: getPosition(ctx.start, ctx.stop),
|
||||
incomplete: Boolean(ctx.exception || text === ''),
|
||||
quoted: hasQuotes,
|
||||
|
@ -313,9 +312,15 @@ export function createOption(name: string, ctx: ParserRuleContext): ESQLCommandO
|
|||
return {
|
||||
type: 'option',
|
||||
name,
|
||||
text: ctx.text,
|
||||
text: ctx.getText(),
|
||||
location: getPosition(ctx.start, ctx.stop),
|
||||
args: [],
|
||||
incomplete: Boolean(ctx.exception || ctx.children?.some((c) => c instanceof ErrorNode)),
|
||||
incomplete: Boolean(
|
||||
ctx.exception ||
|
||||
ctx.children?.some((c) => {
|
||||
// @ts-expect-error not exposed in type but exists see https://github.com/antlr/antlr4/blob/v4.11.1/runtime/JavaScript/src/antlr4/tree/ErrorNodeImpl.js#L19
|
||||
return Boolean(c.isErrorNode);
|
||||
})
|
||||
),
|
||||
};
|
||||
}
|
||||
|
|
@@ -6,20 +6,19 @@
  * Side Public License, v 1.
  */

-import type { Token } from 'antlr4ts';
+import type { Token } from 'antlr4';

 export function getPosition(
-  token: Pick<Token, 'startIndex' | 'stopIndex'> | undefined,
-  lastToken?: Pick<Token, 'stopIndex'> | undefined
+  token: Pick<Token, 'start' | 'stop'> | null,
+  lastToken?: Pick<Token, 'stop'> | undefined
 ) {
-  if (!token || token.startIndex < 0) {
+  if (!token || token.start < 0) {
     return { min: 0, max: 0 };
   }
-  const endFirstToken =
-    token.stopIndex > -1 ? Math.max(token.stopIndex + 1, token.startIndex) : undefined;
-  const endLastToken = lastToken?.stopIndex;
+  const endFirstToken = token.stop > -1 ? Math.max(token.stop + 1, token.start) : undefined;
+  const endLastToken = lastToken?.stop;
   return {
-    min: token.startIndex,
+    min: token.start,
     max: endLastToken ?? endFirstToken ?? Infinity,
   };
 }
|
|
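A quick usage sketch of the updated helper with antlr4's start/stop offsets (values invented, import path assumed):

```ts
import { getPosition } from './ast_position_utils'; // path assumed

// antlr4 tokens expose absolute offsets as `start`/`stop` (both inclusive),
// replacing antlr4ts' `startIndex`/`stopIndex`.
getPosition({ start: 5, stop: 9 });               // => { min: 5, max: 10 }
getPosition({ start: 5, stop: 9 }, { stop: 20 }); // => { min: 5, max: 20 }
getPosition(null);                                // => { min: 0, max: 0 }
```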
|
@ -6,8 +6,9 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { ParserRuleContext } from 'antlr4ts/ParserRuleContext';
|
||||
import { type ParserRuleContext } from 'antlr4';
|
||||
import {
|
||||
default as esql_parser,
|
||||
ArithmeticBinaryContext,
|
||||
ArithmeticUnaryContext,
|
||||
BooleanArrayLiteralContext,
|
||||
|
@ -25,7 +26,6 @@ import {
|
|||
type DissectCommandContext,
|
||||
type DropCommandContext,
|
||||
type EnrichCommandContext,
|
||||
esql_parser,
|
||||
type FieldContext,
|
||||
type FieldsContext,
|
||||
type FromCommandContext,
|
||||
|
@ -69,7 +69,7 @@ import {
|
|||
createFakeMultiplyLiteral,
|
||||
createList,
|
||||
createNumericLiteral,
|
||||
sanifyIdentifierString,
|
||||
sanitizeIdentifierString,
|
||||
computeLocationExtends,
|
||||
createColumnStar,
|
||||
wrapIdentifierAsArray,
|
||||
|
@ -87,25 +87,27 @@ import type {
|
|||
} from './types';
|
||||
|
||||
export function collectAllSourceIdentifiers(ctx: FromCommandContext): ESQLAstItem[] {
|
||||
return ctx.getRuleContexts(FromIdentifierContext).map((sourceCtx) => createSource(sourceCtx));
|
||||
return ctx
|
||||
.getTypedRuleContexts(FromIdentifierContext)
|
||||
.map((sourceCtx) => createSource(sourceCtx));
|
||||
}
|
||||
|
||||
function extractIdentifiers(
|
||||
ctx: KeepCommandContext | DropCommandContext | MvExpandCommandContext | MetadataOptionContext
|
||||
) {
|
||||
if (ctx instanceof MetadataOptionContext) {
|
||||
return wrapIdentifierAsArray(ctx.fromIdentifier());
|
||||
return wrapIdentifierAsArray(ctx.fromIdentifier_list());
|
||||
}
|
||||
if (ctx instanceof MvExpandCommandContext) {
|
||||
return wrapIdentifierAsArray(ctx.qualifiedName());
|
||||
}
|
||||
return wrapIdentifierAsArray(ctx.qualifiedNamePattern());
|
||||
return wrapIdentifierAsArray(ctx.qualifiedNamePattern_list());
|
||||
}
|
||||
|
||||
function makeColumnsOutOfIdentifiers(identifiers: ParserRuleContext[]) {
|
||||
const args: ESQLColumn[] =
|
||||
identifiers
|
||||
.filter((child) => textExistsAndIsValid(child.text))
|
||||
.filter((child) => textExistsAndIsValid(child.getText()))
|
||||
.map((sourceContext) => {
|
||||
return createColumn(sourceContext);
|
||||
}) ?? [];
|
||||
|
@ -137,8 +139,8 @@ export function getMatchField(ctx: EnrichCommandContext) {
|
|||
}
|
||||
const identifier = ctx.qualifiedNamePattern();
|
||||
if (identifier) {
|
||||
const fn = createOption(ctx.ON()!.text.toLowerCase(), ctx);
|
||||
if (textExistsAndIsValid(identifier.text)) {
|
||||
const fn = createOption(ctx.ON()!.getText().toLowerCase(), ctx);
|
||||
if (textExistsAndIsValid(identifier.getText())) {
|
||||
fn.args.push(createColumn(identifier));
|
||||
}
|
||||
// overwrite the location inferring the correct position
|
||||
|
@ -151,21 +153,21 @@ export function getMatchField(ctx: EnrichCommandContext) {
|
|||
export function getEnrichClauses(ctx: EnrichCommandContext) {
|
||||
const ast: ESQLCommandOption[] = [];
|
||||
if (ctx.WITH()) {
|
||||
const option = createOption(ctx.WITH()!.text.toLowerCase(), ctx);
|
||||
const option = createOption(ctx.WITH()!.getText().toLowerCase(), ctx);
|
||||
ast.push(option);
|
||||
const clauses = ctx.enrichWithClause();
|
||||
const clauses = ctx.enrichWithClause_list();
|
||||
for (const clause of clauses) {
|
||||
if (clause._enrichField) {
|
||||
const args = [];
|
||||
if (clause.ASSIGN()) {
|
||||
args.push(createColumn(clause._newName));
|
||||
if (textExistsAndIsValid(clause._enrichField?.text)) {
|
||||
if (textExistsAndIsValid(clause._enrichField?.getText())) {
|
||||
args.push(createColumn(clause._enrichField));
|
||||
}
|
||||
} else {
|
||||
// if an explicit assign is not set, create a fake assign with
|
||||
// both left and right value with the same column
|
||||
if (textExistsAndIsValid(clause._enrichField?.text)) {
|
||||
if (textExistsAndIsValid(clause._enrichField?.getText())) {
|
||||
args.push(createColumn(clause._enrichField), createColumn(clause._enrichField));
|
||||
}
|
||||
}
|
||||
|
@ -202,7 +204,7 @@ function visitLogicalAndsOrs(ctx: LogicalBinaryContext) {
|
|||
|
||||
function visitLogicalIns(ctx: LogicalInContext) {
|
||||
const fn = createFunction(ctx.NOT() ? 'not_in' : 'in', ctx);
|
||||
const [left, ...list] = ctx.valueExpression();
|
||||
const [left, ...list] = ctx.valueExpression_list();
|
||||
const leftArg = visitValueExpression(left);
|
||||
if (leftArg) {
|
||||
fn.args.push(...(Array.isArray(leftArg) ? leftArg : [leftArg]));
|
||||
|
@ -211,7 +213,7 @@ function visitLogicalIns(ctx: LogicalInContext) {
|
|||
.filter(nonNullable)
|
||||
.flatMap((arg) => (Array.isArray(arg) ? arg.filter(nonNullable) : arg));
|
||||
// distinguish between missing brackets (missing text error) and an empty list
|
||||
if (textExistsAndIsValid(ctx.text)) {
|
||||
if (textExistsAndIsValid(ctx.getText())) {
|
||||
fn.args.push(listArgs);
|
||||
}
|
||||
}
|
||||
|
@ -223,29 +225,16 @@ function visitLogicalIns(ctx: LogicalInContext) {
|
|||
|
||||
function getMathOperation(ctx: ArithmeticBinaryContext) {
|
||||
return (
|
||||
ctx.PLUS()?.text ||
|
||||
ctx.MINUS()?.text ||
|
||||
ctx.ASTERISK()?.text ||
|
||||
ctx.SLASH()?.text ||
|
||||
ctx.PERCENT()?.text ||
|
||||
''
|
||||
(ctx.PLUS() || ctx.MINUS() || ctx.ASTERISK() || ctx.SLASH() || ctx.PERCENT()).getText() || ''
|
||||
);
|
||||
}
|
||||
|
||||
function getComparisonName(ctx: ComparisonOperatorContext) {
|
||||
return (
|
||||
ctx.EQ()?.text ||
|
||||
ctx.NEQ()?.text ||
|
||||
ctx.LT()?.text ||
|
||||
ctx.LTE()?.text ||
|
||||
ctx.GT()?.text ||
|
||||
ctx.GTE()?.text ||
|
||||
''
|
||||
);
|
||||
return (ctx.EQ() || ctx.NEQ() || ctx.LT() || ctx.LTE() || ctx.GT() || ctx.GTE()).getText() || '';
|
||||
}
|
||||
|
||||
function visitValueExpression(ctx: ValueExpressionContext) {
|
||||
if (!textExistsAndIsValid(ctx.text)) {
|
||||
if (!textExistsAndIsValid(ctx.getText())) {
|
||||
return [];
|
||||
}
|
||||
if (ctx instanceof ValueExpressionDefaultContext) {
|
||||
|
@ -322,7 +311,7 @@ function getConstant(ctx: ConstantContext | undefined): ESQLAstItem | undefined
|
|||
return getBooleanValue(ctx);
|
||||
}
|
||||
if (ctx instanceof StringLiteralContext) {
|
||||
return createLiteral('string', ctx.string().STRING());
|
||||
return createLiteral('string', ctx.string_().STRING());
|
||||
}
|
||||
if (
|
||||
ctx instanceof NumericArrayLiteralContext ||
|
||||
|
@ -330,14 +319,14 @@ function getConstant(ctx: ConstantContext | undefined): ESQLAstItem | undefined
|
|||
ctx instanceof StringArrayLiteralContext
|
||||
) {
|
||||
const values: ESQLLiteral[] = [];
|
||||
for (const numericValue of ctx.getRuleContexts(NumericValueContext)) {
|
||||
for (const numericValue of ctx.getTypedRuleContexts(NumericValueContext)) {
|
||||
const value = numericValue.decimalValue() || numericValue.integerValue();
|
||||
values.push(createNumericLiteral(value!));
|
||||
}
|
||||
for (const booleanValue of ctx.getRuleContexts(BooleanValueContext)) {
|
||||
for (const booleanValue of ctx.getTypedRuleContexts(BooleanValueContext)) {
|
||||
values.push(getBooleanValue(booleanValue)!);
|
||||
}
|
||||
for (const string of ctx.getRuleContexts(StringContext)) {
|
||||
for (const string of ctx.getTypedRuleContexts(StringContext)) {
|
||||
const literal = createLiteral('string', string.STRING());
|
||||
if (literal) {
|
||||
values.push(literal);
|
||||
|
@ -350,16 +339,16 @@ function getConstant(ctx: ConstantContext | undefined): ESQLAstItem | undefined
|
|||
export function visitRenameClauses(clausesCtx: RenameClauseContext[]): ESQLAstItem[] {
|
||||
return clausesCtx
|
||||
.map((clause) => {
|
||||
const asToken = clause.tryGetToken(esql_parser.AS, 0);
|
||||
const asToken = clause.getToken(esql_parser.AS, 0);
|
||||
if (asToken) {
|
||||
const fn = createOption(asToken.text.toLowerCase(), clause);
|
||||
const fn = createOption(asToken.getText().toLowerCase(), clause);
|
||||
for (const arg of [clause._oldName, clause._newName]) {
|
||||
if (textExistsAndIsValid(arg.text)) {
|
||||
if (textExistsAndIsValid(arg.getText())) {
|
||||
fn.args.push(createColumn(arg));
|
||||
}
|
||||
}
|
||||
return fn;
|
||||
} else if (textExistsAndIsValid(clause._oldName?.text)) {
|
||||
} else if (textExistsAndIsValid(clause._oldName?.getText())) {
|
||||
return createColumn(clause._oldName);
|
||||
}
|
||||
})
|
||||
|
@ -380,7 +369,7 @@ export function visitPrimaryExpression(
|
|||
}
|
||||
if (ctx instanceof FunctionContext) {
|
||||
const functionExpressionCtx = ctx.functionExpression();
|
||||
const fn = createFunction(functionExpressionCtx.identifier().text.toLowerCase(), ctx);
|
||||
const fn = createFunction(functionExpressionCtx.identifier().getText().toLowerCase(), ctx);
|
||||
const asteriskArg = functionExpressionCtx.ASTERISK()
|
||||
? createColumnStar(functionExpressionCtx.ASTERISK()!)
|
||||
: undefined;
|
||||
|
@ -388,7 +377,7 @@ export function visitPrimaryExpression(
|
|||
fn.args.push(asteriskArg);
|
||||
}
|
||||
const functionArgs = functionExpressionCtx
|
||||
.booleanExpression()
|
||||
.booleanExpression_list()
|
||||
.flatMap(collectBooleanExpression)
|
||||
.filter(nonNullable);
|
||||
if (functionArgs.length) {
|
||||
|
@ -412,7 +401,7 @@ export function collectLogicalExpression(ctx: BooleanExpressionContext) {
|
|||
}
|
||||
|
||||
function collectRegexExpression(ctx: BooleanExpressionContext): ESQLFunction[] {
|
||||
const regexes = ctx.getRuleContexts(RegexBooleanExpressionContext);
|
||||
const regexes = ctx.getTypedRuleContexts(RegexBooleanExpressionContext);
|
||||
const ret: ESQLFunction[] = [];
|
||||
return ret.concat(
|
||||
regexes.map((regex) => {
|
||||
|
@ -472,7 +461,7 @@ export function collectBooleanExpression(ctx: BooleanExpressionContext | undefin
|
|||
|
||||
export function visitField(ctx: FieldContext) {
|
||||
if (ctx.qualifiedName() && ctx.ASSIGN()) {
|
||||
const fn = createFunction(ctx.ASSIGN()!.text, ctx);
|
||||
const fn = createFunction(ctx.ASSIGN()!.getText(), ctx);
|
||||
fn.args.push(
|
||||
createColumn(ctx.qualifiedName()!),
|
||||
collectBooleanExpression(ctx.booleanExpression())
|
||||
|
@ -491,7 +480,7 @@ export function collectAllFieldsStatements(ctx: FieldsContext | undefined): ESQL
|
|||
return ast;
|
||||
}
|
||||
try {
|
||||
for (const field of ctx.field()) {
|
||||
for (const field of ctx.field_list()) {
|
||||
ast.push(...visitField(field));
|
||||
}
|
||||
} catch (e) {
|
||||
|
@ -504,7 +493,7 @@ export function visitByOption(ctx: StatsCommandContext, expr: FieldsContext | un
|
|||
if (!ctx.BY() || !expr) {
|
||||
return [];
|
||||
}
|
||||
const option = createOption(ctx.BY()!.text.toLowerCase(), ctx);
|
||||
const option = createOption(ctx.BY()!.getText().toLowerCase(), ctx);
|
||||
option.args.push(...collectAllFieldsStatements(expr));
|
||||
return [option];
|
||||
}
|
||||
|
@ -515,7 +504,7 @@ export function visitOrderExpression(ctx: OrderExpressionContext[]) {
|
|||
const expression = collectBooleanExpression(orderCtx.booleanExpression());
|
||||
if (orderCtx._ordering) {
|
||||
const terminalNode =
|
||||
orderCtx.tryGetToken(esql_parser.ASC, 0) || orderCtx.tryGetToken(esql_parser.DESC, 0);
|
||||
orderCtx.getToken(esql_parser.ASC, 0) || orderCtx.getToken(esql_parser.DESC, 0);
|
||||
const literal = createLiteral('string', terminalNode);
|
||||
if (literal) {
|
||||
expression.push(literal);
|
||||
|
@ -525,7 +514,7 @@ export function visitOrderExpression(ctx: OrderExpressionContext[]) {
|
|||
expression.push(createLiteral('string', orderCtx.NULLS()!)!);
|
||||
if (orderCtx._nullOrdering) {
|
||||
const innerTerminalNode =
|
||||
orderCtx.tryGetToken(esql_parser.FIRST, 0) || orderCtx.tryGetToken(esql_parser.LAST, 0);
|
||||
orderCtx.getToken(esql_parser.FIRST, 0) || orderCtx.getToken(esql_parser.LAST, 0);
|
||||
const literal = createLiteral('string', innerTerminalNode);
|
||||
if (literal) {
|
||||
expression.push(literal);
|
||||
|
@ -541,20 +530,22 @@ export function visitOrderExpression(ctx: OrderExpressionContext[]) {
|
|||
}
|
||||
|
||||
export function visitDissect(ctx: DissectCommandContext) {
|
||||
const pattern = ctx.string().tryGetToken(esql_parser.STRING, 0);
|
||||
const pattern = ctx.string_().getToken(esql_parser.STRING, 0);
|
||||
return [
|
||||
visitPrimaryExpression(ctx.primaryExpression()),
|
||||
...(pattern && textExistsAndIsValid(pattern.text)
|
||||
...(pattern && textExistsAndIsValid(pattern.getText())
|
||||
? [createLiteral('string', pattern), ...visitDissectOptions(ctx.commandOptions())]
|
||||
: []),
|
||||
].filter(nonNullable);
|
||||
}
|
||||
|
||||
export function visitGrok(ctx: GrokCommandContext) {
|
||||
const pattern = ctx.string().tryGetToken(esql_parser.STRING, 0);
|
||||
const pattern = ctx.string_().getToken(esql_parser.STRING, 0);
|
||||
return [
|
||||
visitPrimaryExpression(ctx.primaryExpression()),
|
||||
...(pattern && textExistsAndIsValid(pattern.text) ? [createLiteral('string', pattern)] : []),
|
||||
...(pattern && textExistsAndIsValid(pattern.getText())
|
||||
? [createLiteral('string', pattern)]
|
||||
: []),
|
||||
].filter(nonNullable);
|
||||
}
|
||||
|
||||
|
@ -563,9 +554,9 @@ function visitDissectOptions(ctx: CommandOptionsContext | undefined) {
|
|||
return [];
|
||||
}
|
||||
const options: ESQLCommandOption[] = [];
|
||||
for (const optionCtx of ctx.commandOption()) {
|
||||
for (const optionCtx of ctx.commandOption_list()) {
|
||||
const option = createOption(
|
||||
sanifyIdentifierString(optionCtx.identifier()).toLowerCase(),
|
||||
sanitizeIdentifierString(optionCtx.identifier()).toLowerCase(),
|
||||
optionCtx
|
||||
);
|
||||
options.push(option);
|
||||
|
|
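Many of the mechanical changes in this walker follow from the accessor names the official antlr4 TypeScript target generates; a hedged sketch using only methods visible in this diff (the helper itself and the import path are hypothetical):

```ts
import { default as esql_parser, type RenameClauseContext } from '../../antlr/esql_parser'; // path assumed

// .text becomes .getText(), tryGetToken() becomes getToken(), and repeated
// sub-rules such as renameClause gain *_list() accessors (e.g. renameClause_list()).
function describeRenameClause(clause: RenameClauseContext): string {
  const asToken = clause.getToken(esql_parser.AS, 0); // was clause.tryGetToken(esql_parser.AS, 0)
  return asToken ? `${clause.getText()} (uses ${asToken.getText()})` : clause.getText();
}
```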
|
@ -7,7 +7,7 @@
|
|||
*/
|
||||
|
||||
import { monaco } from '../../../../monaco_imports';
|
||||
import { CharStreams } from 'antlr4ts';
|
||||
import { CharStreams } from 'antlr4';
|
||||
import { suggest } from './autocomplete';
|
||||
import { getParser, ROOT_STATEMENT } from '../../antlr_facade';
|
||||
import { ESQLErrorListener } from '../../monaco/esql_error_listener';
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
*/
|
||||
|
||||
import { EditorError } from '../../../../types';
|
||||
import { CharStreams } from 'antlr4ts';
|
||||
import { CharStreams } from 'antlr4';
|
||||
import { getActions } from './actions';
|
||||
import { getParser, ROOT_STATEMENT } from '../../antlr_facade';
|
||||
import { ESQLErrorListener } from '../../monaco/esql_error_listener';
|
||||
|
|
|
@ -332,10 +332,17 @@ function wrapIntoSpellingChangeAction(
|
|||
);
|
||||
}
|
||||
|
||||
function inferCodeFromError(error: monaco.editor.IMarkerData & { owner?: string }) {
|
||||
if (error.message.includes('missing STRING')) {
|
||||
const [, value] = error.message.split('at ');
|
||||
return value.startsWith("'") && value.endsWith("'") ? 'wrongQuotes' : undefined;
|
||||
function extractQuotedText(rawText: string, error: monaco.editor.IMarkerData) {
|
||||
return rawText.substring(error.startColumn - 2, error.endColumn);
|
||||
}
|
||||
|
||||
function inferCodeFromError(
|
||||
error: monaco.editor.IMarkerData & { owner?: string },
|
||||
rawText: string
|
||||
) {
|
||||
if (error.message.endsWith('expecting STRING')) {
|
||||
const value = extractQuotedText(rawText, error);
|
||||
return /^'(.)*'$/.test(value) ? 'wrongQuotes' : undefined;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -371,7 +378,7 @@ export async function getActions(
|
|||
// so unless there are multiple error/markers for the same area, there's just one
|
||||
// in some cases, like syntax + semantic errors (i.e. unquoted fields eval field-1 ), there might be more than one
|
||||
for (const error of context.markers) {
|
||||
const code = error.code ?? inferCodeFromError(error);
|
||||
const code = error.code ?? inferCodeFromError(error, innerText);
|
||||
switch (code) {
|
||||
case 'unknownColumn':
|
||||
const [columnsSpellChanges, columnsQuotedChanges] = await Promise.all([
|
||||
|
@ -421,7 +428,7 @@ export async function getActions(
|
|||
break;
|
||||
case 'wrongQuotes':
|
||||
// it is a syntax error, so location won't be helpful here
|
||||
const [, errorText] = error.message.split('at ');
|
||||
const errorText = extractQuotedText(innerText, error);
|
||||
actions.push(
|
||||
createAction(
|
||||
i18n.translate('monaco.esql.quickfix.replaceWithQuote', {
|
||||
|
@ -429,7 +436,11 @@ export async function getActions(
|
|||
}),
|
||||
errorText.replaceAll("'", '"'),
|
||||
// override the location
|
||||
{ ...error, endColumn: error.startColumn + errorText.length },
|
||||
{
|
||||
...error,
|
||||
startColumn: error.startColumn - 1,
|
||||
endColumn: error.startColumn + errorText.length,
|
||||
},
|
||||
model.uri
|
||||
)
|
||||
);
|
||||
|
|
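A worked illustration (marker contents and positions invented) of the new quote-fix detection path, matching extractQuotedText and the 'expecting STRING' check above:

```ts
// For the query below, antlr4 reports the single-quoted pattern as a syntax error
// ("... expecting STRING"); Monaco marker columns are 1-based, hence the -2 offset.
const innerText = "from a_index | dissect stringField 'pattern'";
const marker = { startColumn: 37, endColumn: 44, message: "mismatched input ''pattern'' expecting STRING" };

const value = innerText.substring(marker.startColumn - 2, marker.endColumn); // "'pattern'"
const code = /^'(.)*'$/.test(value) ? 'wrongQuotes' : undefined;             // 'wrongQuotes'
```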
|
@ -7,7 +7,7 @@
|
|||
*/
|
||||
|
||||
import { monaco } from '../../../../monaco_imports';
|
||||
import { CharStreams } from 'antlr4ts';
|
||||
import { CharStreams } from 'antlr4';
|
||||
import { getParser, ROOT_STATEMENT } from '../../antlr_facade';
|
||||
import { ESQLErrorListener } from '../../monaco/esql_error_listener';
|
||||
import { AstListener } from '../ast_factory';
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
import type { ESQLCallbacks } from './types';
|
||||
import type { ESQLRealField } from '../validation/types';
|
||||
import { ESQLAst } from '../types';
|
||||
import type { ESQLAst } from '../types';
|
||||
|
||||
export function buildQueryUntilPreviousCommand(ast: ESQLAst, queryString: string) {
|
||||
const prevCommand = ast[Math.max(ast.length - 2, 0)];
|
||||
|
|
|
@ -11,6 +11,9 @@ import type { ESQLPolicy } from './types';
|
|||
|
||||
export function buildQueryForFieldsFromSource(queryString: string, ast: ESQLAst) {
|
||||
const firstCommand = ast[0];
|
||||
if (firstCommand == null) {
|
||||
return '';
|
||||
}
|
||||
return queryString.substring(0, firstCommand.location.max + 1);
|
||||
}
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { CharStreams } from 'antlr4ts';
|
||||
import { CharStreams } from 'antlr4';
|
||||
import { getParser, ROOT_STATEMENT } from '../../antlr_facade';
|
||||
import { join } from 'path';
|
||||
import { writeFile } from 'fs/promises';
|
||||
|
@ -345,13 +345,15 @@ describe('validation logic', () => {
|
|||
['eval', 'stats', 'rename', 'limit', 'keep', 'drop', 'mv_expand', 'dissect', 'grok'].map(
|
||||
(command) =>
|
||||
testErrorsAndWarnings(command, [
|
||||
`SyntaxError: expected {EXPLAIN, FROM, ROW, SHOW} but found "${command}"`,
|
||||
`SyntaxError: mismatched input '${command}' expecting {EXPLAIN, FROM, ROW, SHOW}`,
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
describe('from', () => {
|
||||
testErrorsAndWarnings('f', ['SyntaxError: expected {EXPLAIN, FROM, ROW, SHOW} but found "f"']);
|
||||
testErrorsAndWarnings('f', [
|
||||
"SyntaxError: mismatched input 'f' expecting {EXPLAIN, FROM, ROW, SHOW}",
|
||||
]);
|
||||
testErrorsAndWarnings(`from `, [
|
||||
"SyntaxError: missing {QUOTED_IDENTIFIER, FROM_UNQUOTED_IDENTIFIER} at '<EOF>'",
|
||||
]);
|
||||
|
@ -359,7 +361,7 @@ describe('validation logic', () => {
|
|||
"SyntaxError: missing {QUOTED_IDENTIFIER, FROM_UNQUOTED_IDENTIFIER} at '<EOF>'",
|
||||
]);
|
||||
testErrorsAndWarnings(`from assignment = 1`, [
|
||||
'SyntaxError: expected {<EOF>, PIPE, COMMA, OPENING_BRACKET, METADATA} but found "="',
|
||||
"SyntaxError: mismatched input '=' expecting <EOF>",
|
||||
'Unknown index [assignment]',
|
||||
]);
|
||||
testErrorsAndWarnings(`from index`, []);
|
||||
|
@ -399,8 +401,8 @@ describe('validation logic', () => {
|
|||
`from index ${setWrapping('metadata _id, _source')} ${setWrapping('METADATA _id2')}`,
|
||||
[
|
||||
isWrapped
|
||||
? 'SyntaxError: expected {COMMA, CLOSING_BRACKET} but found "["'
|
||||
: 'SyntaxError: expected {<EOF>, PIPE, COMMA} but found "METADATA"',
|
||||
? "SyntaxError: mismatched input '[' expecting <EOF>"
|
||||
: "SyntaxError: mismatched input 'METADATA' expecting <EOF>",
|
||||
],
|
||||
addBracketsWarning()
|
||||
);
|
||||
|
@ -417,7 +419,7 @@ describe('validation logic', () => {
|
|||
);
|
||||
}
|
||||
testErrorsAndWarnings(`from index (metadata _id)`, [
|
||||
'SyntaxError: expected {<EOF>, PIPE, COMMA, OPENING_BRACKET, METADATA} but found "(metadata"',
|
||||
"SyntaxError: mismatched input '(metadata' expecting <EOF>",
|
||||
]);
|
||||
testErrorsAndWarnings(`from ind*, other*`, []);
|
||||
testErrorsAndWarnings(`from index*`, []);
|
||||
|
@ -438,7 +440,7 @@ describe('validation logic', () => {
|
|||
|
||||
describe('row', () => {
|
||||
testErrorsAndWarnings('row', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('row missing_column', ['Unknown column [missing_column]']);
|
||||
testErrorsAndWarnings('row fn()', ['Unknown function [fn]']);
|
||||
|
@ -455,7 +457,7 @@ describe('validation logic', () => {
|
|||
testErrorsAndWarnings('row a = null', []);
|
||||
testErrorsAndWarnings('row a = (1)', []);
|
||||
testErrorsAndWarnings('row a = (1, 2, 3)', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found ","',
|
||||
"SyntaxError: no viable alternative at input '(1,'",
|
||||
"SyntaxError: extraneous input ')' expecting <EOF>",
|
||||
]);
|
||||
for (const bool of ['true', 'false']) {
|
||||
|
@ -463,12 +465,16 @@ describe('validation logic', () => {
|
|||
testErrorsAndWarnings(`row NOT ${bool}`, []);
|
||||
}
|
||||
|
||||
testErrorsAndWarnings('row var = 1 in ', ['SyntaxError: expected {LP} but found "<EOF>"']);
|
||||
testErrorsAndWarnings('row var = 1 in ', [
|
||||
"SyntaxError: mismatched input '<EOF>' expecting '('",
|
||||
]);
|
||||
testErrorsAndWarnings('row var = 1 in (', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
'Error: [in] function expects exactly 2 arguments, got 1.',
|
||||
]);
|
||||
testErrorsAndWarnings('row var = 1 not in ', ['SyntaxError: expected {LP} but found "<EOF>"']);
|
||||
testErrorsAndWarnings('row var = 1 not in ', [
|
||||
"SyntaxError: mismatched input '<EOF>' expecting '('",
|
||||
]);
|
||||
testErrorsAndWarnings('row var = 1 in (1, 2, 3)', []);
|
||||
testErrorsAndWarnings('row var = 5 in (1, 2, 3)', []);
|
||||
testErrorsAndWarnings('row var = 5 not in (1, 2, 3)', []);
|
||||
|
@ -642,7 +648,7 @@ describe('validation logic', () => {
|
|||
});
|
||||
|
||||
describe('show', () => {
|
||||
testErrorsAndWarnings('show', ['SyntaxError: expected {SHOW} but found "<EOF>"']);
|
||||
testErrorsAndWarnings('show', ["SyntaxError: no viable alternative at input 'show'"]);
|
||||
testErrorsAndWarnings('show functions', []);
|
||||
testErrorsAndWarnings('show info', []);
|
||||
testErrorsAndWarnings('show functions()', [
|
||||
|
@ -666,7 +672,7 @@ describe('validation logic', () => {
|
|||
"SyntaxError: token recognition error at: 'e'",
|
||||
"SyntaxError: token recognition error at: 'l'",
|
||||
"SyntaxError: token recognition error at: 'd'",
|
||||
'SyntaxError: expected {SHOW} but found "<EOF>"',
|
||||
"SyntaxError: no viable alternative at input 'show '",
|
||||
]);
|
||||
});
|
||||
|
||||
|
@ -676,16 +682,16 @@ describe('validation logic', () => {
|
|||
]);
|
||||
testErrorsAndWarnings('from index | limit 4 ', []);
|
||||
testErrorsAndWarnings('from index | limit 4.5', [
|
||||
'SyntaxError: expected {INTEGER_LITERAL} but found "4.5"',
|
||||
"SyntaxError: mismatched input '4.5' expecting INTEGER_LITERAL",
|
||||
]);
|
||||
testErrorsAndWarnings('from index | limit a', [
|
||||
'SyntaxError: expected {INTEGER_LITERAL} but found "a"',
|
||||
"SyntaxError: mismatched input 'a' expecting INTEGER_LITERAL",
|
||||
]);
|
||||
testErrorsAndWarnings('from index | limit numberField', [
|
||||
'SyntaxError: expected {INTEGER_LITERAL} but found "numberField"',
|
||||
"SyntaxError: mismatched input 'numberField' expecting INTEGER_LITERAL",
|
||||
]);
|
||||
testErrorsAndWarnings('from index | limit stringField', [
|
||||
'SyntaxError: expected {INTEGER_LITERAL} but found "stringField"',
|
||||
"SyntaxError: mismatched input 'stringField' expecting INTEGER_LITERAL",
|
||||
]);
|
||||
testErrorsAndWarnings('from index | limit 4', []);
|
||||
});
|
||||
|
@ -706,16 +712,16 @@ describe('validation logic', () => {
|
|||
]);
|
||||
testErrorsAndWarnings('from index | keep `any#Char$Field`', []);
|
||||
testErrorsAndWarnings('from index | project ', [
|
||||
`SyntaxError: expected {DISSECT, DROP, ENRICH, EVAL, GROK, INLINESTATS, KEEP, LIMIT, MV_EXPAND, RENAME, SORT, STATS, WHERE} but found \"project\"`,
|
||||
"SyntaxError: mismatched input 'project' expecting {DISSECT, DROP, ENRICH, EVAL, GROK, INLINESTATS, KEEP, LIMIT, MV_EXPAND, RENAME, SORT, STATS, WHERE}",
|
||||
]);
|
||||
testErrorsAndWarnings('from index | project stringField, numberField, dateField', [
|
||||
`SyntaxError: expected {DISSECT, DROP, ENRICH, EVAL, GROK, INLINESTATS, KEEP, LIMIT, MV_EXPAND, RENAME, SORT, STATS, WHERE} but found \"project\"`,
|
||||
"SyntaxError: mismatched input 'project' expecting {DISSECT, DROP, ENRICH, EVAL, GROK, INLINESTATS, KEEP, LIMIT, MV_EXPAND, RENAME, SORT, STATS, WHERE}",
|
||||
]);
|
||||
testErrorsAndWarnings('from index | PROJECT stringField, numberField, dateField', [
|
||||
`SyntaxError: expected {DISSECT, DROP, ENRICH, EVAL, GROK, INLINESTATS, KEEP, LIMIT, MV_EXPAND, RENAME, SORT, STATS, WHERE} but found \"PROJECT\"`,
|
||||
"SyntaxError: mismatched input 'PROJECT' expecting {DISSECT, DROP, ENRICH, EVAL, GROK, INLINESTATS, KEEP, LIMIT, MV_EXPAND, RENAME, SORT, STATS, WHERE}",
|
||||
]);
|
||||
testErrorsAndWarnings('from index | project missingField, numberField, dateField', [
|
||||
`SyntaxError: expected {DISSECT, DROP, ENRICH, EVAL, GROK, INLINESTATS, KEEP, LIMIT, MV_EXPAND, RENAME, SORT, STATS, WHERE} but found \"project\"`,
|
||||
"SyntaxError: mismatched input 'project' expecting {DISSECT, DROP, ENRICH, EVAL, GROK, INLINESTATS, KEEP, LIMIT, MV_EXPAND, RENAME, SORT, STATS, WHERE}",
|
||||
]);
|
||||
testErrorsAndWarnings('from index | keep s*', []);
|
||||
testErrorsAndWarnings('from index | keep *Field', []);
|
||||
|
@ -808,12 +814,14 @@ describe('validation logic', () => {
|
|||
});
|
||||
|
||||
describe('rename', () => {
|
||||
testErrorsAndWarnings('from a_index | rename', ["SyntaxError: missing ID_PATTERN at '<EOF>'"]);
|
||||
testErrorsAndWarnings('from a_index | rename', [
|
||||
"SyntaxError: mismatched input '<EOF>' expecting ID_PATTERN",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | rename stringField', [
|
||||
'SyntaxError: expected {DOT, AS} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting AS",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | rename a', [
|
||||
'SyntaxError: expected {DOT, AS} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting AS",
|
||||
'Unknown column [a]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | rename stringField as', [
|
||||
|
@ -850,26 +858,27 @@ describe('validation logic', () => {
|
|||
]);
|
||||
testErrorsAndWarnings('row a = 10 | rename a as `this``is fine`', []);
|
||||
testErrorsAndWarnings('row a = 10 | rename a as this is fine', [
|
||||
'SyntaxError: expected {DOT, AS} but found "is"',
|
||||
"SyntaxError: mismatched input 'is' expecting <EOF>",
|
||||
]);
|
||||
});
|
||||
|
||||
describe('dissect', () => {
|
||||
testErrorsAndWarnings('from a_index | dissect', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | dissect stringField', [
|
||||
"SyntaxError: missing STRING at '<EOF>'",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | dissect stringField 2', [
|
||||
'SyntaxError: expected {STRING, DOT} but found "2"',
|
||||
"SyntaxError: mismatched input '2' expecting STRING",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | dissect stringField .', [
|
||||
"SyntaxError: missing {UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} at '<EOF>'",
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
'Unknown column [stringField.]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | dissect stringField %a', [
|
||||
"SyntaxError: missing STRING at '%'",
|
||||
"SyntaxError: mismatched input '%' expecting STRING",
|
||||
"SyntaxError: mismatched input '<EOF>' expecting '='",
|
||||
]);
|
||||
// Do not try to validate the dissect pattern string
|
||||
testErrorsAndWarnings('from a_index | dissect stringField "%{firstWord}"', []);
|
||||
|
@@ -877,10 +886,10 @@ describe('validation logic', () => {
|
|||
'DISSECT only supports string type values, found [numberField] of type [number]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | dissect stringField "%{firstWord}" option ', [
|
||||
'SyntaxError: expected {ASSIGN} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting '='",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | dissect stringField "%{firstWord}" option = ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, NULL, '?', TRUE, '+', '-', OPENING_BRACKET}",
|
||||
'Invalid option for DISSECT: [option]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | dissect stringField "%{firstWord}" option = 1', [
|
||||
|
@@ -906,20 +915,20 @@ describe('validation logic', () => {
|
|||
|
||||
describe('grok', () => {
|
||||
testErrorsAndWarnings('from a_index | grok', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | grok stringField', [
|
||||
"SyntaxError: missing STRING at '<EOF>'",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | grok stringField 2', [
|
||||
'SyntaxError: expected {STRING, DOT} but found "2"',
|
||||
"SyntaxError: mismatched input '2' expecting STRING",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | grok stringField .', [
|
||||
"SyntaxError: missing {UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} at '<EOF>'",
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
'Unknown column [stringField.]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | grok stringField %a', [
|
||||
"SyntaxError: missing STRING at '%'",
|
||||
"SyntaxError: mismatched input '%' expecting STRING",
|
||||
]);
|
||||
// Do not try to validate the grok pattern string
|
||||
testErrorsAndWarnings('from a_index | grok stringField "%{firstWord}"', []);
|
||||
|
@@ -1103,29 +1112,29 @@ describe('validation logic', () => {
|
|||
|
||||
describe('eval', () => {
|
||||
testErrorsAndWarnings('from a_index | eval ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | eval stringField ', []);
|
||||
testErrorsAndWarnings('from a_index | eval b = stringField', []);
|
||||
testErrorsAndWarnings('from a_index | eval numberField + 1', []);
|
||||
testErrorsAndWarnings('from a_index | eval numberField + ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: no viable alternative at input 'numberField + '",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | eval stringField + 1', [
|
||||
'Argument of [+] must be [number], found value [stringField] type [string]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | eval a=b', ['Unknown column [b]']);
|
||||
testErrorsAndWarnings('from a_index | eval a=b, ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
'Unknown column [b]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | eval a=round', ['Unknown column [round]']);
|
||||
testErrorsAndWarnings('from a_index | eval a=round(', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: no viable alternative at input 'round('",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | eval a=round(numberField) ', []);
|
||||
testErrorsAndWarnings('from a_index | eval a=round(numberField), ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | eval a=round(numberField) + round(numberField) ', []);
|
||||
testErrorsAndWarnings('from a_index | eval a=round(numberField) + round(stringField) ', [
|
||||
|
@@ -1475,16 +1484,16 @@ describe('validation logic', () => {
|
|||
[]
|
||||
);
|
||||
testErrorsAndWarnings('from a_index | eval not', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
'Error: [not] function expects exactly one argument, got 0.',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | eval in', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "in"',
|
||||
"SyntaxError: mismatched input 'in' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
|
||||
testErrorsAndWarnings('from a_index | eval stringField in stringField', [
|
||||
"SyntaxError: missing '(' at 'stringField'",
|
||||
'SyntaxError: expected {COMMA, RP} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {',', ')'}",
|
||||
]);
|
||||
|
||||
testErrorsAndWarnings('from a_index | eval stringField in stringField)', [
|
||||
|
@@ -1493,7 +1502,7 @@ describe('validation logic', () => {
|
|||
]);
|
||||
testErrorsAndWarnings('from a_index | eval stringField not in stringField', [
|
||||
"SyntaxError: missing '(' at 'stringField'",
|
||||
'SyntaxError: expected {COMMA, RP} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {',', ')'}",
|
||||
]);
|
||||
|
||||
describe('date math', () => {
|
||||
|
@@ -1548,16 +1557,16 @@ describe('validation logic', () => {
|
|||
]);
|
||||
testErrorsAndWarnings('from a_index | stats by stringField', []);
|
||||
testErrorsAndWarnings('from a_index | stats by ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | stats numberField ', [
|
||||
'Expected an aggregate function or group but got [numberField] of type [FieldAttribute]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | stats numberField=', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | stats numberField=5 by ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | stats avg(numberField) by wrongField', [
|
||||
'Unknown column [wrongField]',
|
||||
|
@@ -1584,7 +1593,7 @@ describe('validation logic', () => {
|
|||
testErrorsAndWarnings(
|
||||
'from a_index | stats avg(numberField) by stringField, percentile(numberField) by ipField',
|
||||
[
|
||||
'SyntaxError: expected {<EOF>, PIPE, AND, COMMA, OR, PLUS, MINUS, ASTERISK, SLASH, PERCENT} but found "by"',
|
||||
"SyntaxError: mismatched input 'by' expecting <EOF>",
|
||||
'STATS BY does not support function percentile',
|
||||
]
|
||||
);
|
||||
|
@@ -1605,10 +1614,10 @@ describe('validation logic', () => {
|
|||
);
|
||||
}
|
||||
testErrorsAndWarnings('from a_index | stats count(* + 1) BY ipField', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "+"',
|
||||
"SyntaxError: no viable alternative at input 'count(* +'",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | stats count(* + round(numberField)) BY ipField', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "+"',
|
||||
"SyntaxError: no viable alternative at input 'count(* +'",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | stats count(round(*)) BY ipField', [
|
||||
'Using wildcards (*) in round is not allowed',
|
||||
|
@@ -1952,12 +1961,12 @@ describe('validation logic', () => {
|
|||
|
||||
describe('sort', () => {
|
||||
testErrorsAndWarnings('from a_index | sort ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | sort "field" ', []);
|
||||
testErrorsAndWarnings('from a_index | sort wrongField ', ['Unknown column [wrongField]']);
|
||||
testErrorsAndWarnings('from a_index | sort numberField, ', [
|
||||
'SyntaxError: expected {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, LP, NOT, NULL, PARAM, TRUE, PLUS, MINUS, OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting {STRING, INTEGER_LITERAL, DECIMAL_LITERAL, FALSE, '(', NOT, NULL, '?', TRUE, '+', '-', OPENING_BRACKET, UNQUOTED_IDENTIFIER, QUOTED_IDENTIFIER}",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | sort numberField, stringField', []);
|
||||
for (const dir of ['desc', 'asc']) {
|
||||
|
@@ -2011,10 +2020,10 @@ describe('validation logic', () => {
|
|||
]);
|
||||
testErrorsAndWarnings(`from a_index | enrich policy `, []);
|
||||
testErrorsAndWarnings('from a_index | enrich `this``is fine`', [
|
||||
'SyntaxError: expected {ENRICH_POLICY_NAME} but found "`this``is fine`"',
|
||||
"SyntaxError: mismatched input '`this``is fine`' expecting ENRICH_POLICY_NAME",
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | enrich this is fine', [
|
||||
'SyntaxError: expected {<EOF>, PIPE, ON, WITH} but found "is"',
|
||||
"SyntaxError: mismatched input 'is' expecting <EOF>",
|
||||
'Unknown policy [this]',
|
||||
]);
|
||||
for (const value of ['any', 'coordinator', 'remote']) {
|
||||
|
@@ -2048,11 +2057,11 @@ describe('validation logic', () => {
|
|||
'Unknown column [this`is fine]',
|
||||
]);
|
||||
testErrorsAndWarnings('from a_index | enrich policy on this is fine', [
|
||||
'SyntaxError: expected {<EOF>, PIPE, DOT, WITH} but found "is"',
|
||||
"SyntaxError: mismatched input 'is' expecting <EOF>",
|
||||
'Unknown column [this]',
|
||||
]);
|
||||
testErrorsAndWarnings(`from a_index | enrich policy on stringField with `, [
|
||||
'SyntaxError: expected {ID_PATTERN} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting ID_PATTERN",
|
||||
]);
|
||||
testErrorsAndWarnings(`from a_index | enrich policy on stringField with var0 `, [
|
||||
'Unknown column [var0]',
|
||||
|
@@ -2071,7 +2080,7 @@ describe('validation logic', () => {
|
|||
// ]);
|
||||
testErrorsAndWarnings(`from a_index |enrich policy on numberField with var0 = , `, [
|
||||
"SyntaxError: missing ID_PATTERN at ','",
|
||||
'SyntaxError: expected {ID_PATTERN} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting ID_PATTERN",
|
||||
'Unknown column [var0]',
|
||||
]);
|
||||
testErrorsAndWarnings(
|
||||
|
@@ -2100,7 +2109,7 @@ describe('validation logic', () => {
|
|||
[]
|
||||
);
|
||||
testErrorsAndWarnings(`from a_index | enrich policy with `, [
|
||||
'SyntaxError: expected {ID_PATTERN} but found "<EOF>"',
|
||||
"SyntaxError: mismatched input '<EOF>' expecting ID_PATTERN",
|
||||
]);
|
||||
testErrorsAndWarnings(`from a_index | enrich policy with otherField`, []);
|
||||
testErrorsAndWarnings(`from a_index | enrich policy | eval otherField`, []);
|
||||
|
|
|
@@ -882,6 +882,7 @@ export async function validateAst(
|
|||
});
|
||||
messages.push(...commandMessages);
|
||||
}
|
||||
|
||||
return {
|
||||
errors: [...errors, ...messages.filter(({ type }) => type === 'error')],
|
||||
warnings: messages.filter(({ type }) => type === 'warning'),
|
||||
|
|
|
@@ -6,36 +6,24 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { ANTLRErrorListener, Recognizer, RecognitionException } from 'antlr4ts';
|
||||
import type { EditorError } from '../../../types';
|
||||
import { createError } from '../ast/ast_errors';
|
||||
|
||||
export class ESQLErrorListener implements ANTLRErrorListener<any> {
|
||||
private errors: EditorError[] = [];
|
||||
import type { Recognizer, RecognitionException } from 'antlr4';
|
||||
import { ANTLRErrorListener } from '../../../common/error_listener';
|
||||
import { getPosition } from '../ast/ast_position_utils';
|
||||
|
||||
export class ESQLErrorListener extends ANTLRErrorListener {
|
||||
syntaxError(
|
||||
recognizer: Recognizer<any, any>,
|
||||
recognizer: Recognizer<any>,
|
||||
offendingSymbol: any,
|
||||
line: number,
|
||||
column: number,
|
||||
message: string,
|
||||
error: RecognitionException | undefined
|
||||
): void {
|
||||
const higherLevelError = error ? createError(error) : undefined;
|
||||
const textMessage =
|
||||
higherLevelError?.text && higherLevelError.text !== error?.message
|
||||
? higherLevelError.text
|
||||
: `SyntaxError: ${message}`;
|
||||
const textMessage = `SyntaxError: ${message}`;
|
||||
|
||||
let endColumn = column + 1;
|
||||
let startColumn = column;
|
||||
|
||||
if (higherLevelError) {
|
||||
startColumn = higherLevelError.location.min + 1;
|
||||
endColumn = higherLevelError.location.max + 1;
|
||||
} else if (offendingSymbol?._text) {
|
||||
endColumn = column + offendingSymbol._text.length;
|
||||
}
|
||||
const tokenPosition = getPosition(offendingSymbol);
|
||||
const startColumn = tokenPosition?.min + 1 || column;
|
||||
const endColumn = tokenPosition?.max + 1 || column + 1;
|
||||
|
||||
this.errors.push({
|
||||
startLineNumber: line,
|
||||
|
@@ -46,8 +34,4 @@ export class ESQLErrorListener implements ANTLRErrorListener<any> {
|
|||
severity: 8, // monaco.MarkerSeverity.Error,
|
||||
});
|
||||
}
|
||||
|
||||
getErrors(): EditorError[] {
|
||||
return this.errors;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -6,9 +6,9 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { CharStreams, type Token } from 'antlr4ts';
|
||||
import { CharStreams, type Token } from 'antlr4';
|
||||
import { monaco } from '../../../monaco_imports';
|
||||
import { ANTLREErrorListener } from '../../../common/error_listener';
|
||||
import { ESQLErrorListener } from './esql_error_listener';
|
||||
|
||||
import { ESQLToken } from './esql_token';
|
||||
import { ESQLLineTokens } from './esql_line_tokens';
|
||||
|
@@ -27,7 +27,7 @@ export class ESQLTokensProvider implements monaco.languages.TokensProvider {
|
|||
|
||||
tokenize(line: string, prevState: ESQLState): monaco.languages.ILineTokens {
|
||||
const errorStartingPoints: number[] = [];
|
||||
const errorListener = new ANTLREErrorListener();
|
||||
const errorListener = new ESQLErrorListener();
|
||||
// This has the drawback of not styling any ESQL wrong query as
|
||||
// | from ...
|
||||
const cleanedLine =
|
||||
|
@@ -54,14 +54,14 @@ export class ESQLTokensProvider implements monaco.languages.TokensProvider {
|
|||
if (token.type === EOF) {
|
||||
done = true;
|
||||
} else {
|
||||
const tokenTypeName = lexer.vocabulary.getSymbolicName(token.type);
|
||||
const tokenTypeName = lexer.symbolicNames[token.type];
|
||||
|
||||
if (tokenTypeName) {
|
||||
const indexOffset = cleanedLine === line ? 0 : line.length - cleanedLine.length;
|
||||
const myToken = new ESQLToken(
|
||||
tokenTypeName,
|
||||
token.startIndex + indexOffset,
|
||||
token.stopIndex + indexOffset
|
||||
token.start + indexOffset,
|
||||
token.stop + indexOffset
|
||||
);
|
||||
myTokens.push(myToken);
|
||||
}
|
||||
|
@@ -75,7 +75,7 @@ export class ESQLTokensProvider implements monaco.languages.TokensProvider {
|
|||
|
||||
myTokens.sort((a, b) => a.startIndex - b.startIndex);
|
||||
|
||||
// special tratement for functions
|
||||
// special treatment for functions
|
||||
// the previous custom Kibana grammar baked functions directly as tokens, so highlight was easier
|
||||
// The ES grammar doesn't have the token concept of "function"
|
||||
const tokensWithFunctions = enrichTokensWithFunctionsMetadata(myTokens);
|
||||
|
|
|
@@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { CharStreams } from 'antlr4ts';
|
||||
import { CharStreams } from 'antlr4';
|
||||
import type { monaco } from '../../monaco_imports';
|
||||
import type { BaseWorkerDefinition } from '../../types';
|
||||
import { getParser, ROOT_STATEMENT } from '../lib/antlr_facade';
|
||||
|
|
|
@@ -111,12 +111,12 @@ To add additional contexts, edit the `supportedContexts` constant in `kbn-monaco
|
|||
|
||||
[ANTLR](https://www.antlr.org/) generates lexical and syntax errors out of the box, which we can use to set error markers in monaco.
|
||||
|
||||
Elasticsearch has defined [lexer and parser grammar](https://github.com/elastic/elasticsearch/tree/master/modules/lang-painless/src/main/antlr) for the Painless language. For now, these rules have been largely copied from ES to Kibana and reside in the `antlr` directory with the `.g4` file extension. We then use [antlr4ts](https://github.com/tunnelvisionlabs/antlr4ts) to generate a lexer and a parser in Typescript.
|
||||
Elasticsearch has defined [lexer and parser grammar](https://github.com/elastic/elasticsearch/tree/master/modules/lang-painless/src/main/antlr) for the Painless language. For now, these rules have been largely copied from ES to Kibana and reside in the `antlr` directory with the `.g4` file extension. We then generate a Typescript target for the lexer and a parser.
|
||||
|
||||
To regenerate the lexer and parser, run the following script:
|
||||
|
||||
```
|
||||
npm run build:antlr4ts
|
||||
npm run build:antlr4
|
||||
```
|
||||
|
||||
*Note:* This script should only need to be run if a change has been made to `painless_lexer.g4` or `painless_parser.g4`.
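
For orientation only (not part of the diff above): a minimal sketch of how the regenerated lexer can be driven through the official `antlr4` runtime and its syntax errors collected for editor markers, in the same spirit as the providers touched by this PR. The module path and the `CollectingErrorListener` / `tokenizePainless` names are assumptions made for this example, not code from this change.

```typescript
// Illustrative sketch only: run the regenerated Painless lexer with the official
// antlr4 runtime and gather syntax errors instead of printing them to the console.
import { CharStreams, ErrorListener, Token } from 'antlr4';
import painless_lexer from './antlr/painless_lexer'; // assumed path to the generated lexer

class CollectingErrorListener extends ErrorListener<Token> {
  public readonly errors: string[] = [];

  syntaxError(_recognizer: unknown, _symbol: Token, line: number, column: number, msg: string) {
    // Mirror the "SyntaxError: ..." message shape used by the editor error listeners.
    this.errors.push(`SyntaxError (${line}:${column + 1}): ${msg}`);
  }
}

export function tokenizePainless(code: string) {
  const lexer = new painless_lexer(CharStreams.fromString(code));
  const listener = new CollectingErrorListener();

  lexer.removeErrorListeners(); // drop the default console listener
  lexer.addErrorListener(listener); // collect errors for editor markers instead

  // With the official package, token bounds are exposed as `start`/`stop`
  // rather than antlr4ts' `startIndex`/`stopIndex`.
  const tokens = lexer.getAllTokens();
  return { tokens, errors: listener.errors };
}
```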
|
||||
|
|
|
@@ -1,22 +1,18 @@
|
|||
// @ts-nocheck
|
||||
// Generated from ./src/painless/antlr/painless_lexer.g4 by ANTLR 4.7.3-SNAPSHOT
|
||||
|
||||
|
||||
import { ATN } from "antlr4ts/atn/ATN";
|
||||
import { ATNDeserializer } from "antlr4ts/atn/ATNDeserializer";
|
||||
import { CharStream } from "antlr4ts/CharStream";
|
||||
import { Lexer } from "antlr4ts/Lexer";
|
||||
import { LexerATNSimulator } from "antlr4ts/atn/LexerATNSimulator";
|
||||
import { NotNull } from "antlr4ts/Decorators";
|
||||
import { Override } from "antlr4ts/Decorators";
|
||||
import { RuleContext } from "antlr4ts/RuleContext";
|
||||
import { Vocabulary } from "antlr4ts/Vocabulary";
|
||||
import { VocabularyImpl } from "antlr4ts/VocabularyImpl";
|
||||
|
||||
import * as Utils from "antlr4ts/misc/Utils";
|
||||
|
||||
|
||||
export class painless_lexer extends Lexer {
|
||||
// Generated from ./src/painless/antlr/painless_lexer.g4 by ANTLR 4.13.1
|
||||
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
|
||||
import {
|
||||
ATN,
|
||||
ATNDeserializer,
|
||||
CharStream,
|
||||
DecisionState, DFA,
|
||||
Lexer,
|
||||
LexerATNSimulator,
|
||||
RuleContext,
|
||||
PredictionContextCache,
|
||||
Token
|
||||
} from "antlr4";
|
||||
export default class painless_lexer extends Lexer {
|
||||
public static readonly WS = 1;
|
||||
public static readonly COMMENT = 2;
|
||||
public static readonly LBRACK = 3;
|
||||
|
@@ -103,17 +99,102 @@ export class painless_lexer extends Lexer {
|
|||
public static readonly ID = 84;
|
||||
public static readonly DOTINTEGER = 85;
|
||||
public static readonly DOTID = 86;
|
||||
public static readonly EOF = Token.EOF;
|
||||
public static readonly AFTER_DOT = 1;
|
||||
|
||||
// tslint:disable:no-trailing-whitespace
|
||||
public static readonly channelNames: string[] = [
|
||||
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
|
||||
];
|
||||
|
||||
// tslint:disable:no-trailing-whitespace
|
||||
public static readonly modeNames: string[] = [
|
||||
"DEFAULT_MODE", "AFTER_DOT",
|
||||
];
|
||||
public static readonly channelNames: string[] = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ];
|
||||
public static readonly literalNames: (string | null)[] = [ null, null,
|
||||
null, "'{'",
|
||||
"'}'", "'['",
|
||||
"']'", "'('",
|
||||
"')'", "'$'",
|
||||
"'.'", "'?.'",
|
||||
"','", "';'",
|
||||
"'if'", "'in'",
|
||||
"'else'", "'while'",
|
||||
"'do'", "'for'",
|
||||
"'continue'",
|
||||
"'break'", "'return'",
|
||||
"'new'", "'try'",
|
||||
"'catch'", "'throw'",
|
||||
"'this'", "'instanceof'",
|
||||
"'!'", "'~'",
|
||||
"'*'", "'/'",
|
||||
"'%'", "'+'",
|
||||
"'-'", "'<<'",
|
||||
"'>>'", "'>>>'",
|
||||
"'<'", "'<='",
|
||||
"'>'", "'>='",
|
||||
"'=='", "'==='",
|
||||
"'!='", "'!=='",
|
||||
"'&'", "'^'",
|
||||
"'|'", "'&&'",
|
||||
"'||'", "'?'",
|
||||
"':'", "'?:'",
|
||||
"'::'", "'->'",
|
||||
"'=~'", "'==~'",
|
||||
"'++'", "'--'",
|
||||
"'='", "'+='",
|
||||
"'-='", "'*='",
|
||||
"'/='", "'%='",
|
||||
"'&='", "'^='",
|
||||
"'|='", "'<<='",
|
||||
"'>>='", "'>>>='",
|
||||
null, null,
|
||||
null, null,
|
||||
null, null,
|
||||
"'true'", "'false'",
|
||||
"'null'", null,
|
||||
"'def'" ];
|
||||
public static readonly symbolicNames: (string | null)[] = [ null, "WS",
|
||||
"COMMENT",
|
||||
"LBRACK", "RBRACK",
|
||||
"LBRACE", "RBRACE",
|
||||
"LP", "RP",
|
||||
"DOLLAR", "DOT",
|
||||
"NSDOT", "COMMA",
|
||||
"SEMICOLON",
|
||||
"IF", "IN",
|
||||
"ELSE", "WHILE",
|
||||
"DO", "FOR",
|
||||
"CONTINUE",
|
||||
"BREAK", "RETURN",
|
||||
"NEW", "TRY",
|
||||
"CATCH", "THROW",
|
||||
"THIS", "INSTANCEOF",
|
||||
"BOOLNOT",
|
||||
"BWNOT", "MUL",
|
||||
"DIV", "REM",
|
||||
"ADD", "SUB",
|
||||
"LSH", "RSH",
|
||||
"USH", "LT",
|
||||
"LTE", "GT",
|
||||
"GTE", "EQ",
|
||||
"EQR", "NE",
|
||||
"NER", "BWAND",
|
||||
"XOR", "BWOR",
|
||||
"BOOLAND",
|
||||
"BOOLOR", "COND",
|
||||
"COLON", "ELVIS",
|
||||
"REF", "ARROW",
|
||||
"FIND", "MATCH",
|
||||
"INCR", "DECR",
|
||||
"ASSIGN", "AADD",
|
||||
"ASUB", "AMUL",
|
||||
"ADIV", "AREM",
|
||||
"AAND", "AXOR",
|
||||
"AOR", "ALSH",
|
||||
"ARSH", "AUSH",
|
||||
"OCTAL", "HEX",
|
||||
"INTEGER",
|
||||
"DECIMAL",
|
||||
"STRING", "REGEX",
|
||||
"TRUE", "FALSE",
|
||||
"NULL", "PRIMITIVE",
|
||||
"DEF", "ID",
|
||||
"DOTINTEGER",
|
||||
"DOTID" ];
|
||||
public static readonly modeNames: string[] = [ "DEFAULT_MODE", "AFTER_DOT", ];
|
||||
|
||||
public static readonly ruleNames: string[] = [
|
||||
"WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOLLAR",
|
||||
|
@@ -128,79 +209,42 @@ export class painless_lexer extends Lexer {
|
|||
"NULL", "PRIMITIVE", "DEF", "ID", "DOTINTEGER", "DOTID",
|
||||
];
|
||||
|
||||
private static readonly _LITERAL_NAMES: Array<string | undefined> = [
|
||||
undefined, undefined, undefined, "'{'", "'}'", "'['", "']'", "'('", "')'",
|
||||
"'$'", "'.'", "'?.'", "','", "';'", "'if'", "'in'", "'else'", "'while'",
|
||||
"'do'", "'for'", "'continue'", "'break'", "'return'", "'new'", "'try'",
|
||||
"'catch'", "'throw'", "'this'", "'instanceof'", "'!'", "'~'", "'*'", "'/'",
|
||||
"'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='",
|
||||
"'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'",
|
||||
"'?'", "':'", "'?:'", "'::'", "'->'", "'=~'", "'==~'", "'++'", "'--'",
|
||||
"'='", "'+='", "'-='", "'*='", "'/='", "'%='", "'&='", "'^='", "'|='",
|
||||
"'<<='", "'>>='", "'>>>='", undefined, undefined, undefined, undefined,
|
||||
undefined, undefined, "'true'", "'false'", "'null'", undefined, "'def'",
|
||||
];
|
||||
private static readonly _SYMBOLIC_NAMES: Array<string | undefined> = [
|
||||
undefined, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP",
|
||||
"RP", "DOLLAR", "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE",
|
||||
"WHILE", "DO", "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH",
|
||||
"THROW", "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM",
|
||||
"ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR",
|
||||
"NE", "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON",
|
||||
"ELVIS", "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD",
|
||||
"ASUB", "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH",
|
||||
"AUSH", "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE",
|
||||
"FALSE", "NULL", "PRIMITIVE", "DEF", "ID", "DOTINTEGER", "DOTID",
|
||||
];
|
||||
public static readonly VOCABULARY: Vocabulary = new VocabularyImpl(painless_lexer._LITERAL_NAMES, painless_lexer._SYMBOLIC_NAMES, []);
|
||||
|
||||
// @Override
|
||||
// @NotNull
|
||||
public get vocabulary(): Vocabulary {
|
||||
return painless_lexer.VOCABULARY;
|
||||
}
|
||||
// tslint:enable:no-trailing-whitespace
|
||||
|
||||
|
||||
constructor(input: CharStream) {
|
||||
super(input);
|
||||
this._interp = new LexerATNSimulator(painless_lexer._ATN, this);
|
||||
this._interp = new LexerATNSimulator(this, painless_lexer._ATN, painless_lexer.DecisionsToDFA, new PredictionContextCache());
|
||||
}
|
||||
|
||||
// @Override
|
||||
public get grammarFileName(): string { return "painless_lexer.g4"; }
|
||||
|
||||
// @Override
|
||||
public get literalNames(): (string | null)[] { return painless_lexer.literalNames; }
|
||||
public get symbolicNames(): (string | null)[] { return painless_lexer.symbolicNames; }
|
||||
public get ruleNames(): string[] { return painless_lexer.ruleNames; }
|
||||
|
||||
// @Override
|
||||
public get serializedATN(): string { return painless_lexer._serializedATN; }
|
||||
public get serializedATN(): number[] { return painless_lexer._serializedATN; }
|
||||
|
||||
// @Override
|
||||
public get channelNames(): string[] { return painless_lexer.channelNames; }
|
||||
|
||||
// @Override
|
||||
public get modeNames(): string[] { return painless_lexer.modeNames; }
|
||||
|
||||
// @Override
|
||||
public sempred(_localctx: RuleContext, ruleIndex: number, predIndex: number): boolean {
|
||||
public sempred(localctx: RuleContext, ruleIndex: number, predIndex: number): boolean {
|
||||
switch (ruleIndex) {
|
||||
case 31:
|
||||
return this.DIV_sempred(_localctx, predIndex);
|
||||
|
||||
return this.DIV_sempred(localctx, predIndex);
|
||||
case 77:
|
||||
return this.REGEX_sempred(_localctx, predIndex);
|
||||
return this.REGEX_sempred(localctx, predIndex);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
private DIV_sempred(_localctx: RuleContext, predIndex: number): boolean {
|
||||
private DIV_sempred(localctx: RuleContext, predIndex: number): boolean {
|
||||
switch (predIndex) {
|
||||
case 0:
|
||||
return this.isSlashRegex() == false ;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
private REGEX_sempred(_localctx: RuleContext, predIndex: number): boolean {
|
||||
private REGEX_sempred(localctx: RuleContext, predIndex: number): boolean {
|
||||
switch (predIndex) {
|
||||
case 1:
|
||||
return this.isSlashRegex() ;
|
||||
|
@@ -208,330 +252,229 @@ export class painless_lexer extends Lexer {
|
|||
return true;
|
||||
}
|
||||
|
||||
private static readonly _serializedATNSegments: number = 2;
|
||||
private static readonly _serializedATNSegment0: string =
|
||||
"\x03\uC91D\uCABA\u058D\uAFBA\u4F53\u0607\uEA8B\uC241\x02X\u027E\b\x01" +
|
||||
"\b\x01\x04\x02\t\x02\x04\x03\t\x03\x04\x04\t\x04\x04\x05\t\x05\x04\x06" +
|
||||
"\t\x06\x04\x07\t\x07\x04\b\t\b\x04\t\t\t\x04\n\t\n\x04\v\t\v\x04\f\t\f" +
|
||||
"\x04\r\t\r\x04\x0E\t\x0E\x04\x0F\t\x0F\x04\x10\t\x10\x04\x11\t\x11\x04" +
|
||||
"\x12\t\x12\x04\x13\t\x13\x04\x14\t\x14\x04\x15\t\x15\x04\x16\t\x16\x04" +
|
||||
"\x17\t\x17\x04\x18\t\x18\x04\x19\t\x19\x04\x1A\t\x1A\x04\x1B\t\x1B\x04" +
|
||||
"\x1C\t\x1C\x04\x1D\t\x1D\x04\x1E\t\x1E\x04\x1F\t\x1F\x04 \t \x04!\t!\x04" +
|
||||
"\"\t\"\x04#\t#\x04$\t$\x04%\t%\x04&\t&\x04\'\t\'\x04(\t(\x04)\t)\x04*" +
|
||||
"\t*\x04+\t+\x04,\t,\x04-\t-\x04.\t.\x04/\t/\x040\t0\x041\t1\x042\t2\x04" +
|
||||
"3\t3\x044\t4\x045\t5\x046\t6\x047\t7\x048\t8\x049\t9\x04:\t:\x04;\t;\x04" +
|
||||
"<\t<\x04=\t=\x04>\t>\x04?\t?\x04@\t@\x04A\tA\x04B\tB\x04C\tC\x04D\tD\x04" +
|
||||
"E\tE\x04F\tF\x04G\tG\x04H\tH\x04I\tI\x04J\tJ\x04K\tK\x04L\tL\x04M\tM\x04" +
|
||||
"N\tN\x04O\tO\x04P\tP\x04Q\tQ\x04R\tR\x04S\tS\x04T\tT\x04U\tU\x04V\tV\x04" +
|
||||
"W\tW\x03\x02\x06\x02\xB2\n\x02\r\x02\x0E\x02\xB3\x03\x02\x03\x02\x03\x03" +
|
||||
"\x03\x03\x03\x03\x03\x03\x07\x03\xBC\n\x03\f\x03\x0E\x03\xBF\v\x03\x03" +
|
||||
"\x03\x03\x03\x03\x03\x03\x03\x03\x03\x07\x03\xC6\n\x03\f\x03\x0E\x03\xC9" +
|
||||
"\v\x03\x03\x03\x03\x03\x05\x03\xCD\n\x03\x03\x03\x03\x03\x03\x04\x03\x04" +
|
||||
"\x03\x05\x03\x05\x03\x06\x03\x06\x03\x07\x03\x07\x03\b\x03\b\x03\t\x03" +
|
||||
"\t\x03\n\x03\n\x03\v\x03\v\x03\v\x03\v\x03\f\x03\f\x03\f\x03\f\x03\f\x03" +
|
||||
"\r\x03\r\x03\x0E\x03\x0E\x03\x0F\x03\x0F\x03\x0F\x03\x10\x03\x10\x03\x10" +
|
||||
"\x03\x11\x03\x11\x03\x11\x03\x11\x03\x11\x03\x12\x03\x12\x03\x12\x03\x12" +
|
||||
"\x03\x12\x03\x12\x03\x13\x03\x13\x03\x13\x03\x14\x03\x14\x03\x14\x03\x14" +
|
||||
"\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15" +
|
||||
"\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x17\x03\x17\x03\x17" +
|
||||
"\x03\x17\x03\x17\x03\x17\x03\x17\x03\x18\x03\x18\x03\x18\x03\x18\x03\x19" +
|
||||
"\x03\x19\x03\x19\x03\x19\x03\x1A\x03\x1A\x03\x1A\x03\x1A\x03\x1A\x03\x1A" +
|
||||
"\x03\x1B\x03\x1B\x03\x1B\x03\x1B\x03\x1B\x03\x1B\x03\x1C\x03\x1C\x03\x1C" +
|
||||
"\x03\x1C\x03\x1C\x03\x1D\x03\x1D\x03\x1D\x03\x1D\x03\x1D\x03\x1D\x03\x1D" +
|
||||
"\x03\x1D\x03\x1D\x03\x1D\x03\x1D\x03\x1E\x03\x1E\x03\x1F\x03\x1F\x03 " +
|
||||
"\x03 \x03!\x03!\x03!\x03\"\x03\"\x03#\x03#\x03$\x03$\x03%\x03%\x03%\x03" +
|
||||
"&\x03&\x03&\x03\'\x03\'\x03\'\x03\'\x03(\x03(\x03)\x03)\x03)\x03*\x03" +
|
||||
"*\x03+\x03+\x03+\x03,\x03,\x03,\x03-\x03-\x03-\x03-\x03.\x03.\x03.\x03" +
|
||||
"/\x03/\x03/\x03/\x030\x030\x031\x031\x032\x032\x033\x033\x033\x034\x03" +
|
||||
"4\x034\x035\x035\x036\x036\x037\x037\x037\x038\x038\x038\x039\x039\x03" +
|
||||
"9\x03:\x03:\x03:\x03;\x03;\x03;\x03;\x03<\x03<\x03<\x03=\x03=\x03=\x03" +
|
||||
">\x03>\x03?\x03?\x03?\x03@\x03@\x03@\x03A\x03A\x03A\x03B\x03B\x03B\x03" +
|
||||
"C\x03C\x03C\x03D\x03D\x03D\x03E\x03E\x03E\x03F\x03F\x03F\x03G\x03G\x03" +
|
||||
"G\x03G\x03H\x03H\x03H\x03H\x03I\x03I\x03I\x03I\x03I\x03J\x03J\x06J\u01BE" +
|
||||
"\nJ\rJ\x0EJ\u01BF\x03J\x05J\u01C3\nJ\x03K\x03K\x03K\x06K\u01C8\nK\rK\x0E" +
|
||||
"K\u01C9\x03K\x05K\u01CD\nK\x03L\x03L\x03L\x07L\u01D2\nL\fL\x0EL\u01D5" +
|
||||
"\vL\x05L\u01D7\nL\x03L\x05L\u01DA\nL\x03M\x03M\x03M\x07M\u01DF\nM\fM\x0E" +
|
||||
"M\u01E2\vM\x05M\u01E4\nM\x03M\x03M\x06M\u01E8\nM\rM\x0EM\u01E9\x05M\u01EC" +
|
||||
"\nM\x03M\x03M\x05M\u01F0\nM\x03M\x06M\u01F3\nM\rM\x0EM\u01F4\x05M\u01F7" +
|
||||
"\nM\x03M\x05M\u01FA\nM\x03N\x03N\x03N\x03N\x03N\x03N\x07N\u0202\nN\fN" +
|
||||
"\x0EN\u0205\vN\x03N\x03N\x03N\x03N\x03N\x03N\x03N\x07N\u020E\nN\fN\x0E" +
|
||||
"N\u0211\vN\x03N\x05N\u0214\nN\x03O\x03O\x03O\x03O\x06O\u021A\nO\rO\x0E" +
|
||||
"O\u021B\x03O\x03O\x07O\u0220\nO\fO\x0EO\u0223\vO\x03O\x03O\x03P\x03P\x03" +
|
||||
"P\x03P\x03P\x03Q\x03Q\x03Q\x03Q\x03Q\x03Q\x03R\x03R\x03R\x03R\x03R\x03" +
|
||||
"S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03" +
|
||||
"S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03" +
|
||||
"S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x03S\x05S\u025D\nS\x03T\x03" +
|
||||
"T\x03T\x03T\x03U\x03U\x07U\u0265\nU\fU\x0EU\u0268\vU\x03V\x03V\x03V\x07" +
|
||||
"V\u026D\nV\fV\x0EV\u0270\vV\x05V\u0272\nV\x03V\x03V\x03W\x03W\x07W\u0278" +
|
||||
"\nW\fW\x0EW\u027B\vW\x03W\x03W\x07\xBD\xC7\u0203\u020F\u021B\x02\x02X" +
|
||||
"\x04\x02\x03\x06\x02\x04\b\x02\x05\n\x02\x06\f\x02\x07\x0E\x02\b\x10\x02" +
|
||||
"\t\x12\x02\n\x14\x02\v\x16\x02\f\x18\x02\r\x1A\x02\x0E\x1C\x02\x0F\x1E" +
|
||||
"\x02\x10 \x02\x11\"\x02\x12$\x02\x13&\x02\x14(\x02\x15*\x02\x16,\x02\x17" +
|
||||
".\x02\x180\x02\x192\x02\x1A4\x02\x1B6\x02\x1C8\x02\x1D:\x02\x1E<\x02\x1F" +
|
||||
">\x02 @\x02!B\x02\"D\x02#F\x02$H\x02%J\x02&L\x02\'N\x02(P\x02)R\x02*T" +
|
||||
"\x02+V\x02,X\x02-Z\x02.\\\x02/^\x020`\x021b\x022d\x023f\x024h\x025j\x02" +
|
||||
"6l\x027n\x028p\x029r\x02:t\x02;v\x02<x\x02=z\x02>|\x02?~\x02@\x80\x02" +
|
||||
"A\x82\x02B\x84\x02C\x86\x02D\x88\x02E\x8A\x02F\x8C\x02G\x8E\x02H\x90\x02" +
|
||||
"I\x92\x02J\x94\x02K\x96\x02L\x98\x02M\x9A\x02N\x9C\x02O\x9E\x02P\xA0\x02" +
|
||||
"Q\xA2\x02R\xA4\x02S\xA6\x02T\xA8\x02U\xAA\x02V\xAC\x02W\xAE\x02X\x04\x02" +
|
||||
"\x03\x15\x05\x02\v\f\x0F\x0F\"\"\x04\x02\f\f\x0F\x0F\x03\x0229\x04\x02" +
|
||||
"NNnn\x04\x02ZZzz\x05\x022;CHch\x03\x023;\x03\x022;\b\x02FFHHNNffhhnn\x04" +
|
||||
"\x02GGgg\x04\x02--//\x06\x02FFHHffhh\x04\x02$$^^\x04\x02))^^\x03\x02\f" +
|
||||
"\f\x04\x02\f\f11\t\x02WWeekknouuwwzz\x05\x02C\\aac|\x06\x022;C\\aac|\x02" +
|
||||
"\u02A4\x02\x04\x03\x02\x02\x02\x02\x06\x03\x02\x02\x02\x02\b\x03\x02\x02" +
|
||||
"\x02\x02\n\x03\x02\x02\x02\x02\f\x03\x02\x02\x02\x02\x0E\x03\x02\x02\x02" +
|
||||
"\x02\x10\x03\x02\x02\x02\x02\x12\x03\x02\x02\x02\x02\x14\x03\x02\x02\x02" +
|
||||
"\x02\x16\x03\x02\x02\x02\x02\x18\x03\x02\x02\x02\x02\x1A\x03\x02\x02\x02" +
|
||||
"\x02\x1C\x03\x02\x02\x02\x02\x1E\x03\x02\x02\x02\x02 \x03\x02\x02\x02" +
|
||||
"\x02\"\x03\x02\x02\x02\x02$\x03\x02\x02\x02\x02&\x03\x02\x02\x02\x02(" +
|
||||
"\x03\x02\x02\x02\x02*\x03\x02\x02\x02\x02,\x03\x02\x02\x02\x02.\x03\x02" +
|
||||
"\x02\x02\x020\x03\x02\x02\x02\x022\x03\x02\x02\x02\x024\x03\x02\x02\x02" +
|
||||
"\x026\x03\x02\x02\x02\x028\x03\x02\x02\x02\x02:\x03\x02\x02\x02\x02<\x03" +
|
||||
"\x02\x02\x02\x02>\x03\x02\x02\x02\x02@\x03\x02\x02\x02\x02B\x03\x02\x02" +
|
||||
"\x02\x02D\x03\x02\x02\x02\x02F\x03\x02\x02\x02\x02H\x03\x02\x02\x02\x02" +
|
||||
"J\x03\x02\x02\x02\x02L\x03\x02\x02\x02\x02N\x03\x02\x02\x02\x02P\x03\x02" +
|
||||
"\x02\x02\x02R\x03\x02\x02\x02\x02T\x03\x02\x02\x02\x02V\x03\x02\x02\x02" +
|
||||
"\x02X\x03\x02\x02\x02\x02Z\x03\x02\x02\x02\x02\\\x03\x02\x02\x02\x02^" +
|
||||
"\x03\x02\x02\x02\x02`\x03\x02\x02\x02\x02b\x03\x02\x02\x02\x02d\x03\x02" +
|
||||
"\x02\x02\x02f\x03\x02\x02\x02\x02h\x03\x02\x02\x02\x02j\x03\x02\x02\x02" +
|
||||
"\x02l\x03\x02\x02\x02\x02n\x03\x02\x02\x02\x02p\x03\x02\x02\x02\x02r\x03" +
|
||||
"\x02\x02\x02\x02t\x03\x02\x02\x02\x02v\x03\x02\x02\x02\x02x\x03\x02\x02" +
|
||||
"\x02\x02z\x03\x02\x02\x02\x02|\x03\x02\x02\x02\x02~\x03\x02\x02\x02\x02" +
|
||||
"\x80\x03\x02\x02\x02\x02\x82\x03\x02\x02\x02\x02\x84\x03\x02\x02\x02\x02" +
|
||||
"\x86\x03\x02\x02\x02\x02\x88\x03\x02\x02\x02\x02\x8A\x03\x02\x02\x02\x02" +
|
||||
"\x8C\x03\x02\x02\x02\x02\x8E\x03\x02\x02\x02\x02\x90\x03\x02\x02\x02\x02" +
|
||||
"\x92\x03\x02\x02\x02\x02\x94\x03\x02\x02\x02\x02\x96\x03\x02\x02\x02\x02" +
|
||||
"\x98\x03\x02\x02\x02\x02\x9A\x03\x02\x02\x02\x02\x9C\x03\x02\x02\x02\x02" +
|
||||
"\x9E\x03\x02\x02\x02\x02\xA0\x03\x02\x02\x02\x02\xA2\x03\x02\x02\x02\x02" +
|
||||
"\xA4\x03\x02\x02\x02\x02\xA6\x03\x02\x02\x02\x02\xA8\x03\x02\x02\x02\x02" +
|
||||
"\xAA\x03\x02\x02\x02\x03\xAC\x03\x02\x02\x02\x03\xAE\x03\x02\x02\x02\x04" +
|
||||
"\xB1\x03\x02\x02\x02\x06\xCC\x03\x02\x02\x02\b\xD0\x03\x02\x02\x02\n\xD2" +
|
||||
"\x03\x02\x02\x02\f\xD4\x03\x02\x02\x02\x0E\xD6\x03\x02\x02\x02\x10\xD8" +
|
||||
"\x03\x02\x02\x02\x12\xDA\x03\x02\x02\x02\x14\xDC\x03\x02\x02\x02\x16\xDE" +
|
||||
"\x03\x02\x02\x02\x18\xE2\x03\x02\x02\x02\x1A\xE7\x03\x02\x02\x02\x1C\xE9" +
|
||||
"\x03\x02\x02\x02\x1E\xEB\x03\x02\x02\x02 \xEE\x03\x02\x02\x02\"\xF1\x03" +
|
||||
"\x02\x02\x02$\xF6\x03\x02\x02\x02&\xFC\x03\x02\x02\x02(\xFF\x03\x02\x02" +
|
||||
"\x02*\u0103\x03\x02\x02\x02,\u010C\x03\x02\x02\x02.\u0112\x03\x02\x02" +
|
||||
"\x020\u0119\x03\x02\x02\x022\u011D\x03\x02\x02\x024\u0121\x03\x02\x02" +
|
||||
"\x026\u0127\x03\x02\x02\x028\u012D\x03\x02\x02\x02:\u0132\x03\x02\x02" +
|
||||
"\x02<\u013D\x03\x02\x02\x02>\u013F\x03\x02\x02\x02@\u0141\x03\x02\x02" +
|
||||
"\x02B\u0143\x03\x02\x02\x02D\u0146\x03\x02\x02\x02F\u0148\x03\x02\x02" +
|
||||
"\x02H\u014A\x03\x02\x02\x02J\u014C\x03\x02\x02\x02L\u014F\x03\x02\x02" +
|
||||
"\x02N\u0152\x03\x02\x02\x02P\u0156\x03\x02\x02\x02R\u0158\x03\x02\x02" +
|
||||
"\x02T\u015B\x03\x02\x02\x02V\u015D\x03\x02\x02\x02X\u0160\x03\x02\x02" +
|
||||
"\x02Z\u0163\x03\x02\x02\x02\\\u0167\x03\x02\x02\x02^\u016A\x03\x02\x02" +
|
||||
"\x02`\u016E\x03\x02\x02\x02b\u0170\x03\x02\x02\x02d\u0172\x03\x02\x02" +
|
||||
"\x02f\u0174\x03\x02\x02\x02h\u0177\x03\x02\x02\x02j\u017A\x03\x02\x02" +
|
||||
"\x02l\u017C\x03\x02\x02\x02n\u017E\x03\x02\x02\x02p\u0181\x03\x02\x02" +
|
||||
"\x02r\u0184\x03\x02\x02\x02t\u0187\x03\x02\x02\x02v\u018A\x03\x02\x02" +
|
||||
"\x02x\u018E\x03\x02\x02\x02z\u0191\x03\x02\x02\x02|\u0194\x03\x02\x02" +
|
||||
"\x02~\u0196\x03\x02\x02\x02\x80\u0199\x03\x02\x02\x02\x82\u019C\x03\x02" +
|
||||
"\x02\x02\x84\u019F\x03\x02\x02\x02\x86\u01A2\x03\x02\x02\x02\x88\u01A5" +
|
||||
"\x03\x02\x02\x02\x8A\u01A8\x03\x02\x02\x02\x8C\u01AB\x03\x02\x02\x02\x8E" +
|
||||
"\u01AE\x03\x02\x02\x02\x90\u01B2\x03\x02\x02\x02\x92\u01B6\x03\x02\x02" +
|
||||
"\x02\x94\u01BB\x03\x02\x02\x02\x96\u01C4\x03\x02\x02\x02\x98\u01D6\x03" +
|
||||
"\x02\x02\x02\x9A\u01E3\x03\x02\x02\x02\x9C\u0213\x03\x02\x02\x02\x9E\u0215" +
|
||||
"\x03\x02\x02\x02\xA0\u0226\x03\x02\x02\x02\xA2\u022B\x03\x02\x02\x02\xA4" +
|
||||
"\u0231\x03\x02\x02\x02\xA6\u025C\x03\x02\x02\x02\xA8\u025E\x03\x02\x02" +
|
||||
"\x02\xAA\u0262\x03\x02\x02\x02\xAC\u0271\x03\x02\x02\x02\xAE\u0275\x03" +
|
||||
"\x02\x02\x02\xB0\xB2\t\x02\x02\x02\xB1\xB0\x03\x02\x02\x02\xB2\xB3\x03" +
|
||||
"\x02\x02\x02\xB3\xB1\x03\x02\x02\x02\xB3\xB4\x03\x02\x02\x02\xB4\xB5\x03" +
|
||||
"\x02\x02\x02\xB5\xB6\b\x02\x02\x02\xB6\x05\x03\x02\x02\x02\xB7\xB8\x07" +
|
||||
"1\x02\x02\xB8\xB9\x071\x02\x02\xB9\xBD\x03\x02\x02\x02\xBA\xBC\v\x02\x02" +
|
||||
"\x02\xBB\xBA\x03\x02\x02\x02\xBC\xBF\x03\x02\x02\x02\xBD\xBE\x03\x02\x02" +
|
||||
"\x02\xBD\xBB\x03\x02\x02\x02\xBE\xC0\x03\x02\x02\x02\xBF\xBD\x03\x02\x02" +
|
||||
"\x02\xC0\xCD\t\x03\x02\x02\xC1\xC2\x071\x02\x02\xC2\xC3\x07,\x02\x02\xC3" +
|
||||
"\xC7\x03\x02\x02\x02\xC4\xC6\v\x02\x02\x02\xC5\xC4\x03\x02\x02\x02\xC6" +
|
||||
"\xC9\x03\x02\x02\x02\xC7\xC8\x03\x02\x02\x02\xC7\xC5\x03\x02\x02\x02\xC8" +
|
||||
"\xCA\x03\x02\x02\x02\xC9\xC7\x03\x02\x02\x02\xCA\xCB\x07,\x02\x02\xCB" +
|
||||
"\xCD\x071\x02\x02\xCC\xB7\x03\x02\x02\x02\xCC\xC1\x03\x02\x02\x02\xCD" +
|
||||
"\xCE\x03\x02\x02\x02\xCE\xCF\b\x03\x02\x02\xCF\x07\x03\x02\x02\x02\xD0" +
|
||||
"\xD1\x07}\x02\x02\xD1\t\x03\x02\x02\x02\xD2\xD3\x07\x7F\x02\x02\xD3\v" +
|
||||
"\x03\x02\x02\x02\xD4\xD5\x07]\x02\x02\xD5\r\x03\x02\x02\x02\xD6\xD7\x07" +
|
||||
"_\x02\x02\xD7\x0F\x03\x02\x02\x02\xD8\xD9\x07*\x02\x02\xD9\x11\x03\x02" +
|
||||
"\x02\x02\xDA\xDB\x07+\x02\x02\xDB\x13\x03\x02\x02\x02\xDC\xDD\x07&\x02" +
|
||||
"\x02\xDD\x15\x03\x02\x02\x02\xDE\xDF\x070\x02\x02\xDF\xE0\x03\x02\x02" +
|
||||
"\x02\xE0\xE1\b\v\x03\x02\xE1\x17\x03\x02\x02\x02\xE2\xE3\x07A\x02\x02" +
|
||||
"\xE3\xE4\x070\x02\x02\xE4\xE5\x03\x02\x02\x02\xE5\xE6\b\f\x03\x02\xE6" +
|
||||
"\x19\x03\x02\x02\x02\xE7\xE8\x07.\x02\x02\xE8\x1B\x03\x02\x02\x02\xE9" +
|
||||
"\xEA\x07=\x02\x02\xEA\x1D\x03\x02\x02\x02\xEB\xEC\x07k\x02\x02\xEC\xED" +
|
||||
"\x07h\x02\x02\xED\x1F\x03\x02\x02\x02\xEE\xEF\x07k\x02\x02\xEF\xF0\x07" +
|
||||
"p\x02\x02\xF0!\x03\x02\x02\x02\xF1\xF2\x07g\x02\x02\xF2\xF3\x07n\x02\x02" +
|
||||
"\xF3\xF4\x07u\x02\x02\xF4\xF5\x07g\x02\x02\xF5#\x03\x02\x02\x02\xF6\xF7" +
|
||||
"\x07y\x02\x02\xF7\xF8\x07j\x02\x02\xF8\xF9\x07k\x02\x02\xF9\xFA\x07n\x02" +
|
||||
"\x02\xFA\xFB\x07g\x02\x02\xFB%\x03\x02\x02\x02\xFC\xFD\x07f\x02\x02\xFD" +
|
||||
"\xFE\x07q\x02\x02\xFE\'\x03\x02\x02\x02\xFF\u0100\x07h\x02\x02\u0100\u0101" +
|
||||
"\x07q\x02\x02\u0101\u0102\x07t\x02\x02\u0102)\x03\x02\x02\x02\u0103\u0104" +
|
||||
"\x07e\x02\x02\u0104\u0105\x07q\x02\x02\u0105\u0106\x07p\x02\x02\u0106" +
|
||||
"\u0107\x07v\x02\x02\u0107\u0108\x07k\x02\x02\u0108\u0109\x07p\x02\x02" +
|
||||
"\u0109\u010A\x07w\x02\x02\u010A\u010B\x07g\x02\x02\u010B+\x03\x02\x02" +
|
||||
"\x02\u010C\u010D\x07d\x02\x02\u010D\u010E\x07t\x02\x02\u010E\u010F\x07" +
|
||||
"g\x02\x02\u010F\u0110\x07c\x02\x02\u0110\u0111\x07m\x02\x02\u0111-\x03" +
|
||||
"\x02\x02\x02\u0112\u0113\x07t\x02\x02\u0113\u0114\x07g\x02\x02\u0114\u0115" +
|
||||
"\x07v\x02\x02\u0115\u0116\x07w\x02\x02\u0116\u0117\x07t\x02\x02\u0117" +
|
||||
"\u0118\x07p\x02\x02\u0118/\x03\x02\x02\x02\u0119\u011A\x07p\x02\x02\u011A" +
|
||||
"\u011B\x07g\x02\x02\u011B\u011C\x07y\x02\x02\u011C1\x03\x02\x02\x02\u011D" +
|
||||
"\u011E\x07v\x02\x02\u011E\u011F\x07t\x02\x02\u011F\u0120\x07{\x02\x02" +
|
||||
"\u01203\x03\x02\x02\x02\u0121\u0122\x07e\x02\x02\u0122\u0123\x07c\x02" +
|
||||
"\x02\u0123\u0124\x07v\x02\x02\u0124\u0125\x07e\x02\x02\u0125\u0126\x07" +
|
||||
"j\x02\x02\u01265\x03\x02\x02\x02\u0127\u0128\x07v\x02\x02\u0128\u0129" +
|
||||
"\x07j\x02\x02\u0129\u012A\x07t\x02\x02\u012A\u012B\x07q\x02\x02\u012B" +
|
||||
"\u012C\x07y\x02\x02\u012C7\x03\x02\x02\x02\u012D\u012E\x07v\x02\x02\u012E" +
|
||||
"\u012F\x07j\x02\x02\u012F\u0130\x07k\x02\x02\u0130\u0131\x07u\x02\x02" +
|
||||
"\u01319\x03\x02\x02\x02\u0132\u0133\x07k\x02\x02\u0133\u0134\x07p\x02" +
|
||||
"\x02\u0134\u0135\x07u\x02\x02\u0135\u0136\x07v\x02\x02\u0136\u0137\x07" +
|
||||
"c\x02\x02\u0137\u0138\x07p\x02\x02\u0138\u0139\x07e\x02\x02\u0139\u013A" +
|
||||
"\x07g\x02\x02\u013A\u013B\x07q\x02\x02\u013B\u013C\x07h\x02\x02\u013C" +
|
||||
";\x03\x02\x02\x02\u013D\u013E\x07#\x02\x02\u013E=\x03\x02\x02\x02\u013F" +
|
||||
"\u0140\x07\x80\x02\x02\u0140?\x03\x02\x02\x02\u0141\u0142\x07,\x02\x02" +
|
||||
"\u0142A\x03\x02\x02\x02\u0143\u0144\x071\x02\x02\u0144\u0145\x06!\x02" +
|
||||
"\x02\u0145C\x03\x02\x02\x02\u0146\u0147\x07\'\x02\x02\u0147E\x03\x02\x02" +
|
||||
"\x02\u0148\u0149\x07-\x02\x02\u0149G\x03\x02\x02\x02\u014A\u014B\x07/" +
|
||||
"\x02\x02\u014BI\x03\x02\x02\x02\u014C\u014D\x07>\x02\x02\u014D\u014E\x07" +
|
||||
">\x02\x02\u014EK\x03\x02\x02\x02\u014F\u0150\x07@\x02\x02\u0150\u0151" +
|
||||
"\x07@\x02\x02\u0151M\x03\x02\x02\x02\u0152\u0153\x07@\x02\x02\u0153\u0154" +
|
||||
"\x07@\x02\x02\u0154\u0155\x07@\x02\x02\u0155O\x03\x02\x02\x02\u0156\u0157" +
|
||||
"\x07>\x02\x02\u0157Q\x03\x02\x02\x02\u0158\u0159\x07>\x02\x02\u0159\u015A" +
|
||||
"\x07?\x02\x02\u015AS\x03\x02\x02\x02\u015B\u015C\x07@\x02\x02\u015CU\x03" +
|
||||
"\x02\x02\x02\u015D\u015E\x07@\x02\x02\u015E\u015F\x07?\x02\x02\u015FW" +
|
||||
"\x03\x02\x02\x02\u0160\u0161\x07?\x02\x02\u0161\u0162\x07?\x02\x02\u0162" +
|
||||
"Y\x03\x02\x02\x02\u0163\u0164\x07?\x02\x02\u0164\u0165\x07?\x02\x02\u0165" +
|
||||
"\u0166\x07?\x02\x02\u0166[\x03\x02\x02\x02\u0167\u0168\x07#\x02\x02\u0168" +
|
||||
"\u0169\x07?\x02\x02\u0169]\x03\x02\x02\x02\u016A\u016B\x07#\x02\x02\u016B" +
|
||||
"\u016C\x07?\x02\x02\u016C\u016D\x07?\x02\x02\u016D_\x03\x02\x02\x02\u016E" +
|
||||
"\u016F\x07(\x02\x02\u016Fa\x03\x02\x02\x02\u0170\u0171\x07`\x02\x02\u0171" +
|
||||
"c\x03\x02\x02\x02\u0172\u0173\x07~\x02\x02\u0173e\x03\x02\x02\x02\u0174" +
|
||||
"\u0175\x07(\x02\x02\u0175\u0176\x07(\x02\x02\u0176g\x03\x02\x02\x02\u0177" +
|
||||
"\u0178\x07~\x02\x02\u0178\u0179\x07~\x02\x02\u0179i\x03\x02\x02\x02\u017A" +
|
||||
"\u017B\x07A\x02\x02\u017Bk\x03\x02\x02\x02\u017C\u017D\x07<\x02\x02\u017D" +
|
||||
"m\x03\x02\x02\x02\u017E\u017F\x07A\x02\x02\u017F\u0180\x07<\x02\x02\u0180" +
|
||||
"o\x03\x02\x02\x02\u0181\u0182\x07<\x02\x02\u0182\u0183\x07<\x02\x02\u0183" +
|
||||
"q\x03\x02\x02\x02\u0184\u0185\x07/\x02\x02\u0185\u0186\x07@\x02\x02\u0186" +
|
||||
"s\x03\x02\x02\x02\u0187\u0188\x07?\x02\x02\u0188\u0189\x07\x80\x02\x02" +
|
||||
"\u0189u\x03\x02\x02\x02\u018A\u018B\x07?\x02\x02\u018B\u018C\x07?\x02" +
|
||||
"\x02\u018C\u018D\x07\x80\x02\x02\u018Dw\x03\x02\x02\x02\u018E\u018F\x07" +
|
||||
"-\x02\x02\u018F\u0190\x07-\x02\x02\u0190y\x03\x02\x02\x02\u0191\u0192" +
|
||||
"\x07/\x02\x02\u0192\u0193\x07/\x02\x02\u0193{\x03\x02\x02\x02\u0194\u0195" +
|
||||
"\x07?\x02\x02\u0195}\x03\x02\x02\x02\u0196\u0197\x07-\x02\x02\u0197\u0198" +
|
||||
"\x07?\x02\x02\u0198\x7F\x03\x02\x02\x02\u0199\u019A\x07/\x02\x02\u019A" +
|
||||
"\u019B\x07?\x02\x02\u019B\x81\x03\x02\x02\x02\u019C\u019D\x07,\x02\x02" +
|
||||
"\u019D\u019E\x07?\x02\x02\u019E\x83\x03\x02\x02\x02\u019F\u01A0\x071\x02" +
|
||||
"\x02\u01A0\u01A1\x07?\x02\x02\u01A1\x85\x03\x02\x02\x02\u01A2\u01A3\x07" +
|
||||
"\'\x02\x02\u01A3\u01A4\x07?\x02\x02\u01A4\x87\x03\x02\x02\x02\u01A5\u01A6" +
|
||||
"\x07(\x02\x02\u01A6\u01A7\x07?\x02\x02\u01A7\x89\x03\x02\x02\x02\u01A8" +
|
||||
"\u01A9\x07`\x02\x02\u01A9\u01AA\x07?\x02\x02\u01AA\x8B\x03\x02\x02\x02" +
|
||||
"\u01AB\u01AC\x07~\x02\x02\u01AC\u01AD\x07?\x02\x02\u01AD\x8D\x03\x02\x02" +
|
||||
"\x02\u01AE\u01AF\x07>\x02\x02\u01AF\u01B0\x07>\x02\x02\u01B0\u01B1\x07" +
|
||||
"?\x02\x02\u01B1\x8F\x03\x02\x02\x02\u01B2\u01B3\x07@\x02\x02\u01B3\u01B4" +
|
||||
"\x07@\x02\x02\u01B4\u01B5\x07?\x02\x02\u01B5\x91\x03\x02\x02\x02\u01B6" +
|
||||
"\u01B7\x07@\x02\x02\u01B7\u01B8\x07@\x02\x02\u01B8\u01B9\x07@\x02\x02" +
|
||||
"\u01B9\u01BA\x07?\x02\x02\u01BA\x93\x03\x02\x02\x02\u01BB\u01BD\x072\x02" +
|
||||
"\x02\u01BC\u01BE\t\x04\x02\x02\u01BD\u01BC\x03\x02\x02\x02\u01BE\u01BF" +
|
||||
"\x03\x02\x02\x02\u01BF\u01BD\x03\x02\x02\x02\u01BF\u01C0\x03\x02\x02\x02" +
|
||||
"\u01C0\u01C2\x03\x02\x02\x02\u01C1\u01C3\t\x05\x02\x02\u01C2\u01C1\x03" +
|
||||
"\x02\x02\x02\u01C2\u01C3\x03\x02\x02\x02\u01C3\x95\x03\x02\x02\x02\u01C4" +
|
||||
"\u01C5\x072\x02\x02\u01C5\u01C7\t\x06\x02\x02\u01C6\u01C8\t\x07\x02\x02" +
|
||||
"\u01C7\u01C6\x03\x02\x02\x02\u01C8\u01C9\x03\x02\x02\x02\u01C9\u01C7\x03" +
|
||||
"\x02\x02\x02\u01C9\u01CA\x03\x02\x02\x02\u01CA\u01CC\x03\x02\x02\x02\u01CB" +
|
||||
"\u01CD\t\x05\x02\x02\u01CC\u01CB\x03\x02\x02\x02\u01CC\u01CD\x03\x02\x02" +
|
||||
"\x02\u01CD\x97\x03\x02\x02\x02\u01CE\u01D7\x072\x02\x02\u01CF\u01D3\t" +
|
||||
"\b\x02\x02\u01D0\u01D2\t\t\x02\x02\u01D1\u01D0\x03\x02\x02\x02\u01D2\u01D5" +
|
||||
"\x03\x02\x02\x02\u01D3\u01D1\x03\x02\x02\x02\u01D3\u01D4\x03\x02\x02\x02" +
|
||||
"\u01D4\u01D7\x03\x02\x02\x02\u01D5\u01D3\x03\x02\x02\x02\u01D6\u01CE\x03" +
|
||||
"\x02\x02\x02\u01D6\u01CF\x03\x02\x02\x02\u01D7\u01D9\x03\x02\x02\x02\u01D8" +
|
||||
"\u01DA\t\n\x02\x02\u01D9\u01D8\x03\x02\x02\x02\u01D9\u01DA\x03\x02\x02" +
|
||||
"\x02\u01DA\x99\x03\x02\x02\x02\u01DB\u01E4\x072\x02\x02\u01DC\u01E0\t" +
|
||||
"\b\x02\x02\u01DD\u01DF\t\t\x02\x02\u01DE\u01DD\x03\x02\x02\x02\u01DF\u01E2" +
|
||||
"\x03\x02\x02\x02\u01E0\u01DE\x03\x02\x02\x02\u01E0\u01E1\x03\x02\x02\x02" +
|
||||
"\u01E1\u01E4\x03\x02\x02\x02\u01E2\u01E0\x03\x02\x02\x02\u01E3\u01DB\x03" +
|
||||
"\x02\x02\x02\u01E3\u01DC\x03\x02\x02\x02\u01E4\u01EB\x03\x02\x02\x02\u01E5" +
|
||||
"\u01E7\x05\x16\v\x02\u01E6\u01E8\t\t\x02\x02\u01E7\u01E6\x03\x02\x02\x02" +
|
||||
"\u01E8\u01E9\x03\x02\x02\x02\u01E9\u01E7\x03\x02\x02\x02\u01E9\u01EA\x03" +
|
||||
"\x02\x02\x02\u01EA\u01EC\x03\x02\x02\x02\u01EB\u01E5\x03\x02\x02\x02\u01EB" +
|
||||
"\u01EC\x03\x02\x02\x02\u01EC\u01F6\x03\x02\x02\x02\u01ED\u01EF\t\v\x02" +
|
||||
"\x02\u01EE\u01F0\t\f\x02\x02\u01EF\u01EE\x03\x02\x02\x02\u01EF\u01F0\x03" +
|
||||
"\x02\x02\x02\u01F0\u01F2\x03\x02\x02\x02\u01F1\u01F3\t\t\x02\x02\u01F2" +
|
||||
"\u01F1\x03\x02\x02\x02\u01F3\u01F4\x03\x02\x02\x02\u01F4\u01F2\x03\x02" +
|
||||
"\x02\x02\u01F4\u01F5\x03\x02\x02\x02\u01F5\u01F7\x03\x02\x02\x02\u01F6" +
|
||||
"\u01ED\x03\x02\x02\x02\u01F6\u01F7\x03\x02\x02\x02\u01F7\u01F9\x03\x02" +
|
||||
"\x02\x02\u01F8\u01FA\t\r\x02\x02\u01F9\u01F8\x03\x02\x02\x02\u01F9\u01FA" +
|
||||
"\x03\x02\x02\x02\u01FA\x9B\x03\x02\x02\x02\u01FB\u0203\x07$\x02\x02\u01FC" +
|
||||
"\u01FD\x07^\x02\x02\u01FD\u0202\x07$\x02\x02\u01FE\u01FF\x07^\x02\x02" +
|
||||
"\u01FF\u0202\x07^\x02\x02\u0200\u0202\n\x0E\x02\x02\u0201\u01FC\x03\x02" +
|
||||
"\x02\x02\u0201\u01FE\x03\x02\x02\x02\u0201\u0200\x03\x02\x02\x02\u0202" +
|
||||
"\u0205\x03\x02\x02\x02\u0203\u0204\x03\x02\x02\x02\u0203\u0201\x03\x02" +
|
||||
"\x02\x02\u0204\u0206\x03\x02\x02\x02\u0205\u0203\x03\x02\x02\x02\u0206" +
|
||||
"\u0214\x07$\x02\x02\u0207\u020F\x07)\x02\x02\u0208\u0209\x07^\x02\x02" +
|
||||
"\u0209\u020E\x07)\x02\x02\u020A\u020B\x07^\x02\x02\u020B\u020E\x07^\x02" +
|
||||
"\x02\u020C\u020E\n\x0F\x02\x02\u020D\u0208";
|
||||
private static readonly _serializedATNSegment1: string =
|
||||
"\x03\x02\x02\x02\u020D\u020A\x03\x02\x02\x02\u020D\u020C\x03\x02\x02\x02" +
|
||||
"\u020E\u0211\x03\x02\x02\x02\u020F\u0210\x03\x02\x02\x02\u020F\u020D\x03" +
|
||||
"\x02\x02\x02\u0210\u0212\x03\x02\x02\x02\u0211\u020F\x03\x02\x02\x02\u0212" +
|
||||
"\u0214\x07)\x02\x02\u0213\u01FB\x03\x02\x02\x02\u0213\u0207\x03\x02\x02" +
|
||||
"\x02\u0214\x9D\x03\x02\x02\x02\u0215\u0219\x071\x02\x02\u0216\u0217\x07" +
|
||||
"^\x02\x02\u0217\u021A\n\x10\x02\x02\u0218\u021A\n\x11\x02\x02\u0219\u0216" +
|
||||
"\x03\x02\x02\x02\u0219\u0218\x03\x02\x02\x02\u021A\u021B\x03\x02\x02\x02" +
|
||||
"\u021B\u021C\x03\x02\x02\x02\u021B\u0219\x03\x02\x02\x02\u021C\u021D\x03" +
|
||||
"\x02\x02\x02\u021D\u0221\x071\x02\x02\u021E\u0220\t\x12\x02\x02\u021F" +
|
||||
"\u021E\x03\x02\x02\x02\u0220\u0223\x03\x02\x02\x02\u0221\u021F\x03\x02" +
|
||||
"\x02\x02\u0221\u0222\x03\x02\x02\x02\u0222\u0224\x03\x02\x02\x02\u0223" +
|
||||
"\u0221\x03\x02\x02\x02\u0224\u0225\x06O\x03\x02\u0225\x9F\x03\x02\x02" +
|
||||
"\x02\u0226\u0227\x07v\x02\x02\u0227\u0228\x07t\x02\x02\u0228\u0229\x07" +
|
||||
"w\x02\x02\u0229\u022A\x07g\x02\x02\u022A\xA1\x03\x02\x02\x02\u022B\u022C" +
|
||||
"\x07h\x02\x02\u022C\u022D\x07c\x02\x02\u022D\u022E\x07n\x02\x02\u022E" +
|
||||
"\u022F\x07u\x02\x02\u022F\u0230\x07g\x02\x02\u0230\xA3\x03\x02\x02\x02" +
|
||||
"\u0231\u0232\x07p\x02\x02\u0232\u0233\x07w\x02\x02\u0233\u0234\x07n\x02" +
|
||||
"\x02\u0234\u0235\x07n\x02\x02\u0235\xA5\x03\x02\x02\x02\u0236\u0237\x07" +
|
||||
"d\x02\x02\u0237\u0238\x07q\x02\x02\u0238\u0239\x07q\x02\x02\u0239\u023A" +
|
||||
"\x07n\x02\x02\u023A\u023B\x07g\x02\x02\u023B\u023C\x07c\x02\x02\u023C" +
|
||||
"\u025D\x07p\x02\x02\u023D\u023E\x07d\x02\x02\u023E\u023F\x07{\x02\x02" +
|
||||
"\u023F\u0240\x07v\x02\x02\u0240\u025D\x07g\x02\x02\u0241\u0242\x07u\x02" +
|
||||
"\x02\u0242\u0243\x07j\x02\x02\u0243\u0244\x07q\x02\x02\u0244\u0245\x07" +
|
||||
"t\x02\x02\u0245\u025D\x07v\x02\x02\u0246\u0247\x07e\x02\x02\u0247\u0248" +
|
||||
"\x07j\x02\x02\u0248\u0249\x07c\x02\x02\u0249\u025D\x07t\x02\x02\u024A" +
|
||||
"\u024B\x07k\x02\x02\u024B\u024C\x07p\x02\x02\u024C\u025D\x07v\x02\x02" +
|
||||
"\u024D\u024E\x07n\x02\x02\u024E\u024F\x07q\x02\x02\u024F\u0250\x07p\x02" +
|
||||
"\x02\u0250\u025D\x07i\x02\x02\u0251\u0252\x07h\x02\x02\u0252\u0253\x07" +
|
||||
"n\x02\x02\u0253\u0254\x07q\x02\x02\u0254\u0255\x07c\x02\x02\u0255\u025D" +
|
||||
"\x07v\x02\x02\u0256\u0257\x07f\x02\x02\u0257\u0258\x07q\x02\x02\u0258" +
|
||||
"\u0259\x07w\x02\x02\u0259\u025A\x07d\x02\x02\u025A\u025B\x07n\x02\x02" +
|
||||
"\u025B\u025D\x07g\x02\x02\u025C\u0236\x03\x02\x02\x02\u025C\u023D\x03" +
|
||||
"\x02\x02\x02\u025C\u0241\x03\x02\x02\x02\u025C\u0246\x03\x02\x02\x02\u025C" +
|
||||
"\u024A\x03\x02\x02\x02\u025C\u024D\x03\x02\x02\x02\u025C\u0251\x03\x02" +
|
||||
"\x02\x02\u025C\u0256\x03\x02\x02\x02\u025D\xA7\x03\x02\x02\x02\u025E\u025F" +
|
||||
"\x07f\x02\x02\u025F\u0260\x07g\x02\x02\u0260\u0261\x07h\x02\x02\u0261" +
|
||||
"\xA9\x03\x02\x02\x02\u0262\u0266\t\x13\x02\x02\u0263\u0265\t\x14\x02\x02" +
|
||||
"\u0264\u0263\x03\x02\x02\x02\u0265\u0268\x03\x02\x02\x02\u0266\u0264\x03" +
|
||||
"\x02\x02\x02\u0266\u0267\x03\x02\x02\x02\u0267\xAB\x03\x02\x02\x02\u0268" +
|
||||
"\u0266\x03\x02\x02\x02\u0269\u0272\x072\x02\x02\u026A\u026E\t\b\x02\x02" +
|
||||
"\u026B\u026D\t\t\x02\x02\u026C\u026B\x03\x02\x02\x02\u026D\u0270\x03\x02" +
|
||||
"\x02\x02\u026E\u026C\x03\x02\x02\x02\u026E\u026F\x03\x02\x02\x02\u026F" +
|
||||
"\u0272\x03\x02\x02\x02\u0270\u026E\x03\x02\x02\x02\u0271\u0269\x03\x02" +
|
||||
"\x02\x02\u0271\u026A\x03\x02\x02\x02\u0272\u0273\x03\x02\x02\x02\u0273" +
|
||||
"\u0274\bV\x04\x02\u0274\xAD\x03\x02\x02\x02\u0275\u0279\t\x13\x02\x02" +
|
||||
"\u0276\u0278\t\x14\x02\x02\u0277\u0276\x03\x02\x02\x02\u0278\u027B\x03" +
|
||||
"\x02\x02\x02\u0279\u0277\x03\x02\x02\x02\u0279\u027A\x03\x02\x02\x02\u027A" +
|
||||
"\u027C\x03\x02\x02\x02\u027B\u0279\x03\x02\x02\x02\u027C\u027D\bW\x04" +
|
||||
"\x02\u027D\xAF\x03\x02\x02\x02$\x02\x03\xB3\xBD\xC7\xCC\u01BF\u01C2\u01C9" +
|
||||
"\u01CC\u01D3\u01D6\u01D9\u01E0\u01E3\u01E9\u01EB\u01EF\u01F4\u01F6\u01F9" +
|
||||
"\u0201\u0203\u020D\u020F\u0213\u0219\u021B\u0221\u025C\u0266\u026E\u0271" +
|
||||
"\u0279\x05\b\x02\x02\x04\x03\x02\x04\x02\x02";
"\u0279\x05\b\x02\x02\x04\x03\x02\x04\x02\x02";
public static readonly _serializedATN: string = Utils.join(
[
painless_lexer._serializedATNSegment0,
painless_lexer._serializedATNSegment1,
],
"",
);
public static __ATN: ATN;
public static readonly _serializedATN: number[] = [4,0,86,636,6,-1,6,-1,
2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,
|
||||
2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7,14,2,15,7,15,2,16,
|
||||
7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,
|
||||
23,2,24,7,24,2,25,7,25,2,26,7,26,2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,
|
||||
2,31,7,31,2,32,7,32,2,33,7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,2,
|
||||
38,7,38,2,39,7,39,2,40,7,40,2,41,7,41,2,42,7,42,2,43,7,43,2,44,7,44,2,45,
|
||||
7,45,2,46,7,46,2,47,7,47,2,48,7,48,2,49,7,49,2,50,7,50,2,51,7,51,2,52,7,
|
||||
52,2,53,7,53,2,54,7,54,2,55,7,55,2,56,7,56,2,57,7,57,2,58,7,58,2,59,7,59,
|
||||
2,60,7,60,2,61,7,61,2,62,7,62,2,63,7,63,2,64,7,64,2,65,7,65,2,66,7,66,2,
|
||||
67,7,67,2,68,7,68,2,69,7,69,2,70,7,70,2,71,7,71,2,72,7,72,2,73,7,73,2,74,
|
||||
7,74,2,75,7,75,2,76,7,76,2,77,7,77,2,78,7,78,2,79,7,79,2,80,7,80,2,81,7,
|
||||
81,2,82,7,82,2,83,7,83,2,84,7,84,2,85,7,85,1,0,4,0,176,8,0,11,0,12,0,177,
|
||||
1,0,1,0,1,1,1,1,1,1,1,1,5,1,186,8,1,10,1,12,1,189,9,1,1,1,1,1,1,1,1,1,1,
|
||||
1,5,1,196,8,1,10,1,12,1,199,9,1,1,1,1,1,3,1,203,8,1,1,1,1,1,1,2,1,2,1,3,
|
||||
1,3,1,4,1,4,1,5,1,5,1,6,1,6,1,7,1,7,1,8,1,8,1,9,1,9,1,9,1,9,1,10,1,10,1,
|
||||
10,1,10,1,10,1,11,1,11,1,12,1,12,1,13,1,13,1,13,1,14,1,14,1,14,1,15,1,15,
|
||||
1,15,1,15,1,15,1,16,1,16,1,16,1,16,1,16,1,16,1,17,1,17,1,17,1,18,1,18,1,
|
||||
18,1,18,1,19,1,19,1,19,1,19,1,19,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,
|
||||
1,20,1,20,1,21,1,21,1,21,1,21,1,21,1,21,1,21,1,22,1,22,1,22,1,22,1,23,1,
|
||||
23,1,23,1,23,1,24,1,24,1,24,1,24,1,24,1,24,1,25,1,25,1,25,1,25,1,25,1,25,
|
||||
1,26,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,
|
||||
27,1,27,1,28,1,28,1,29,1,29,1,30,1,30,1,31,1,31,1,31,1,32,1,32,1,33,1,33,
|
||||
1,34,1,34,1,35,1,35,1,35,1,36,1,36,1,36,1,37,1,37,1,37,1,37,1,38,1,38,1,
|
||||
39,1,39,1,39,1,40,1,40,1,41,1,41,1,41,1,42,1,42,1,42,1,43,1,43,1,43,1,43,
|
||||
1,44,1,44,1,44,1,45,1,45,1,45,1,45,1,46,1,46,1,47,1,47,1,48,1,48,1,49,1,
|
||||
49,1,49,1,50,1,50,1,50,1,51,1,51,1,52,1,52,1,53,1,53,1,53,1,54,1,54,1,54,
|
||||
1,55,1,55,1,55,1,56,1,56,1,56,1,57,1,57,1,57,1,57,1,58,1,58,1,58,1,59,1,
|
||||
59,1,59,1,60,1,60,1,61,1,61,1,61,1,62,1,62,1,62,1,63,1,63,1,63,1,64,1,64,
|
||||
1,64,1,65,1,65,1,65,1,66,1,66,1,66,1,67,1,67,1,67,1,68,1,68,1,68,1,69,1,
|
||||
69,1,69,1,69,1,70,1,70,1,70,1,70,1,71,1,71,1,71,1,71,1,71,1,72,1,72,4,72,
|
||||
444,8,72,11,72,12,72,445,1,72,3,72,449,8,72,1,73,1,73,1,73,4,73,454,8,73,
|
||||
11,73,12,73,455,1,73,3,73,459,8,73,1,74,1,74,1,74,5,74,464,8,74,10,74,12,
|
||||
74,467,9,74,3,74,469,8,74,1,74,3,74,472,8,74,1,75,1,75,1,75,5,75,477,8,
|
||||
75,10,75,12,75,480,9,75,3,75,482,8,75,1,75,1,75,4,75,486,8,75,11,75,12,
|
||||
75,487,3,75,490,8,75,1,75,1,75,3,75,494,8,75,1,75,4,75,497,8,75,11,75,12,
|
||||
75,498,3,75,501,8,75,1,75,3,75,504,8,75,1,76,1,76,1,76,1,76,1,76,1,76,5,
|
||||
76,512,8,76,10,76,12,76,515,9,76,1,76,1,76,1,76,1,76,1,76,1,76,1,76,5,76,
|
||||
524,8,76,10,76,12,76,527,9,76,1,76,3,76,530,8,76,1,77,1,77,1,77,1,77,4,
|
||||
77,536,8,77,11,77,12,77,537,1,77,1,77,5,77,542,8,77,10,77,12,77,545,9,77,
|
||||
1,77,1,77,1,78,1,78,1,78,1,78,1,78,1,79,1,79,1,79,1,79,1,79,1,79,1,80,1,
|
||||
80,1,80,1,80,1,80,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,
|
||||
1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,
|
||||
81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,1,81,3,81,603,
|
||||
8,81,1,82,1,82,1,82,1,82,1,83,1,83,5,83,611,8,83,10,83,12,83,614,9,83,1,
|
||||
84,1,84,1,84,5,84,619,8,84,10,84,12,84,622,9,84,3,84,624,8,84,1,84,1,84,
|
||||
1,85,1,85,5,85,630,8,85,10,85,12,85,633,9,85,1,85,1,85,5,187,197,513,525,
|
||||
537,0,86,2,1,4,2,6,3,8,4,10,5,12,6,14,7,16,8,18,9,20,10,22,11,24,12,26,
|
||||
13,28,14,30,15,32,16,34,17,36,18,38,19,40,20,42,21,44,22,46,23,48,24,50,
|
||||
25,52,26,54,27,56,28,58,29,60,30,62,31,64,32,66,33,68,34,70,35,72,36,74,
|
||||
37,76,38,78,39,80,40,82,41,84,42,86,43,88,44,90,45,92,46,94,47,96,48,98,
|
||||
49,100,50,102,51,104,52,106,53,108,54,110,55,112,56,114,57,116,58,118,59,
|
||||
120,60,122,61,124,62,126,63,128,64,130,65,132,66,134,67,136,68,138,69,140,
|
||||
70,142,71,144,72,146,73,148,74,150,75,152,76,154,77,156,78,158,79,160,80,
|
||||
162,81,164,82,166,83,168,84,170,85,172,86,2,0,1,19,3,0,9,10,13,13,32,32,
|
||||
2,0,10,10,13,13,1,0,48,55,2,0,76,76,108,108,2,0,88,88,120,120,3,0,48,57,
|
||||
65,70,97,102,1,0,49,57,1,0,48,57,6,0,68,68,70,70,76,76,100,100,102,102,
|
||||
108,108,2,0,69,69,101,101,2,0,43,43,45,45,4,0,68,68,70,70,100,100,102,102,
|
||||
2,0,34,34,92,92,2,0,39,39,92,92,1,0,10,10,2,0,10,10,47,47,7,0,85,85,99,
|
||||
99,105,105,108,109,115,115,117,117,120,120,3,0,65,90,95,95,97,122,4,0,48,
|
||||
57,65,90,95,95,97,122,674,0,2,1,0,0,0,0,4,1,0,0,0,0,6,1,0,0,0,0,8,1,0,0,
|
||||
0,0,10,1,0,0,0,0,12,1,0,0,0,0,14,1,0,0,0,0,16,1,0,0,0,0,18,1,0,0,0,0,20,
|
||||
1,0,0,0,0,22,1,0,0,0,0,24,1,0,0,0,0,26,1,0,0,0,0,28,1,0,0,0,0,30,1,0,0,
|
||||
0,0,32,1,0,0,0,0,34,1,0,0,0,0,36,1,0,0,0,0,38,1,0,0,0,0,40,1,0,0,0,0,42,
|
||||
1,0,0,0,0,44,1,0,0,0,0,46,1,0,0,0,0,48,1,0,0,0,0,50,1,0,0,0,0,52,1,0,0,
|
||||
0,0,54,1,0,0,0,0,56,1,0,0,0,0,58,1,0,0,0,0,60,1,0,0,0,0,62,1,0,0,0,0,64,
|
||||
1,0,0,0,0,66,1,0,0,0,0,68,1,0,0,0,0,70,1,0,0,0,0,72,1,0,0,0,0,74,1,0,0,
|
||||
0,0,76,1,0,0,0,0,78,1,0,0,0,0,80,1,0,0,0,0,82,1,0,0,0,0,84,1,0,0,0,0,86,
|
||||
1,0,0,0,0,88,1,0,0,0,0,90,1,0,0,0,0,92,1,0,0,0,0,94,1,0,0,0,0,96,1,0,0,
|
||||
0,0,98,1,0,0,0,0,100,1,0,0,0,0,102,1,0,0,0,0,104,1,0,0,0,0,106,1,0,0,0,
|
||||
0,108,1,0,0,0,0,110,1,0,0,0,0,112,1,0,0,0,0,114,1,0,0,0,0,116,1,0,0,0,0,
|
||||
118,1,0,0,0,0,120,1,0,0,0,0,122,1,0,0,0,0,124,1,0,0,0,0,126,1,0,0,0,0,128,
|
||||
1,0,0,0,0,130,1,0,0,0,0,132,1,0,0,0,0,134,1,0,0,0,0,136,1,0,0,0,0,138,1,
|
||||
0,0,0,0,140,1,0,0,0,0,142,1,0,0,0,0,144,1,0,0,0,0,146,1,0,0,0,0,148,1,0,
|
||||
0,0,0,150,1,0,0,0,0,152,1,0,0,0,0,154,1,0,0,0,0,156,1,0,0,0,0,158,1,0,0,
|
||||
0,0,160,1,0,0,0,0,162,1,0,0,0,0,164,1,0,0,0,0,166,1,0,0,0,0,168,1,0,0,0,
|
||||
1,170,1,0,0,0,1,172,1,0,0,0,2,175,1,0,0,0,4,202,1,0,0,0,6,206,1,0,0,0,8,
|
||||
208,1,0,0,0,10,210,1,0,0,0,12,212,1,0,0,0,14,214,1,0,0,0,16,216,1,0,0,0,
|
||||
18,218,1,0,0,0,20,220,1,0,0,0,22,224,1,0,0,0,24,229,1,0,0,0,26,231,1,0,
|
||||
0,0,28,233,1,0,0,0,30,236,1,0,0,0,32,239,1,0,0,0,34,244,1,0,0,0,36,250,
|
||||
1,0,0,0,38,253,1,0,0,0,40,257,1,0,0,0,42,266,1,0,0,0,44,272,1,0,0,0,46,
|
||||
279,1,0,0,0,48,283,1,0,0,0,50,287,1,0,0,0,52,293,1,0,0,0,54,299,1,0,0,0,
|
||||
56,304,1,0,0,0,58,315,1,0,0,0,60,317,1,0,0,0,62,319,1,0,0,0,64,321,1,0,
|
||||
0,0,66,324,1,0,0,0,68,326,1,0,0,0,70,328,1,0,0,0,72,330,1,0,0,0,74,333,
|
||||
1,0,0,0,76,336,1,0,0,0,78,340,1,0,0,0,80,342,1,0,0,0,82,345,1,0,0,0,84,
|
||||
347,1,0,0,0,86,350,1,0,0,0,88,353,1,0,0,0,90,357,1,0,0,0,92,360,1,0,0,0,
|
||||
94,364,1,0,0,0,96,366,1,0,0,0,98,368,1,0,0,0,100,370,1,0,0,0,102,373,1,
|
||||
0,0,0,104,376,1,0,0,0,106,378,1,0,0,0,108,380,1,0,0,0,110,383,1,0,0,0,112,
|
||||
386,1,0,0,0,114,389,1,0,0,0,116,392,1,0,0,0,118,396,1,0,0,0,120,399,1,0,
|
||||
0,0,122,402,1,0,0,0,124,404,1,0,0,0,126,407,1,0,0,0,128,410,1,0,0,0,130,
|
||||
413,1,0,0,0,132,416,1,0,0,0,134,419,1,0,0,0,136,422,1,0,0,0,138,425,1,0,
|
||||
0,0,140,428,1,0,0,0,142,432,1,0,0,0,144,436,1,0,0,0,146,441,1,0,0,0,148,
|
||||
450,1,0,0,0,150,468,1,0,0,0,152,481,1,0,0,0,154,529,1,0,0,0,156,531,1,0,
|
||||
0,0,158,548,1,0,0,0,160,553,1,0,0,0,162,559,1,0,0,0,164,602,1,0,0,0,166,
|
||||
604,1,0,0,0,168,608,1,0,0,0,170,623,1,0,0,0,172,627,1,0,0,0,174,176,7,0,
|
||||
0,0,175,174,1,0,0,0,176,177,1,0,0,0,177,175,1,0,0,0,177,178,1,0,0,0,178,
|
||||
179,1,0,0,0,179,180,6,0,0,0,180,3,1,0,0,0,181,182,5,47,0,0,182,183,5,47,
|
||||
0,0,183,187,1,0,0,0,184,186,9,0,0,0,185,184,1,0,0,0,186,189,1,0,0,0,187,
|
||||
188,1,0,0,0,187,185,1,0,0,0,188,190,1,0,0,0,189,187,1,0,0,0,190,203,7,1,
|
||||
0,0,191,192,5,47,0,0,192,193,5,42,0,0,193,197,1,0,0,0,194,196,9,0,0,0,195,
|
||||
194,1,0,0,0,196,199,1,0,0,0,197,198,1,0,0,0,197,195,1,0,0,0,198,200,1,0,
|
||||
0,0,199,197,1,0,0,0,200,201,5,42,0,0,201,203,5,47,0,0,202,181,1,0,0,0,202,
|
||||
191,1,0,0,0,203,204,1,0,0,0,204,205,6,1,0,0,205,5,1,0,0,0,206,207,5,123,
|
||||
0,0,207,7,1,0,0,0,208,209,5,125,0,0,209,9,1,0,0,0,210,211,5,91,0,0,211,
|
||||
11,1,0,0,0,212,213,5,93,0,0,213,13,1,0,0,0,214,215,5,40,0,0,215,15,1,0,
|
||||
0,0,216,217,5,41,0,0,217,17,1,0,0,0,218,219,5,36,0,0,219,19,1,0,0,0,220,
|
||||
221,5,46,0,0,221,222,1,0,0,0,222,223,6,9,1,0,223,21,1,0,0,0,224,225,5,63,
|
||||
0,0,225,226,5,46,0,0,226,227,1,0,0,0,227,228,6,10,1,0,228,23,1,0,0,0,229,
|
||||
230,5,44,0,0,230,25,1,0,0,0,231,232,5,59,0,0,232,27,1,0,0,0,233,234,5,105,
|
||||
0,0,234,235,5,102,0,0,235,29,1,0,0,0,236,237,5,105,0,0,237,238,5,110,0,
|
||||
0,238,31,1,0,0,0,239,240,5,101,0,0,240,241,5,108,0,0,241,242,5,115,0,0,
|
||||
242,243,5,101,0,0,243,33,1,0,0,0,244,245,5,119,0,0,245,246,5,104,0,0,246,
|
||||
247,5,105,0,0,247,248,5,108,0,0,248,249,5,101,0,0,249,35,1,0,0,0,250,251,
|
||||
5,100,0,0,251,252,5,111,0,0,252,37,1,0,0,0,253,254,5,102,0,0,254,255,5,
|
||||
111,0,0,255,256,5,114,0,0,256,39,1,0,0,0,257,258,5,99,0,0,258,259,5,111,
|
||||
0,0,259,260,5,110,0,0,260,261,5,116,0,0,261,262,5,105,0,0,262,263,5,110,
|
||||
0,0,263,264,5,117,0,0,264,265,5,101,0,0,265,41,1,0,0,0,266,267,5,98,0,0,
|
||||
267,268,5,114,0,0,268,269,5,101,0,0,269,270,5,97,0,0,270,271,5,107,0,0,
|
||||
271,43,1,0,0,0,272,273,5,114,0,0,273,274,5,101,0,0,274,275,5,116,0,0,275,
|
||||
276,5,117,0,0,276,277,5,114,0,0,277,278,5,110,0,0,278,45,1,0,0,0,279,280,
|
||||
5,110,0,0,280,281,5,101,0,0,281,282,5,119,0,0,282,47,1,0,0,0,283,284,5,
|
||||
116,0,0,284,285,5,114,0,0,285,286,5,121,0,0,286,49,1,0,0,0,287,288,5,99,
|
||||
0,0,288,289,5,97,0,0,289,290,5,116,0,0,290,291,5,99,0,0,291,292,5,104,0,
|
||||
0,292,51,1,0,0,0,293,294,5,116,0,0,294,295,5,104,0,0,295,296,5,114,0,0,
|
||||
296,297,5,111,0,0,297,298,5,119,0,0,298,53,1,0,0,0,299,300,5,116,0,0,300,
|
||||
301,5,104,0,0,301,302,5,105,0,0,302,303,5,115,0,0,303,55,1,0,0,0,304,305,
|
||||
5,105,0,0,305,306,5,110,0,0,306,307,5,115,0,0,307,308,5,116,0,0,308,309,
|
||||
5,97,0,0,309,310,5,110,0,0,310,311,5,99,0,0,311,312,5,101,0,0,312,313,5,
|
||||
111,0,0,313,314,5,102,0,0,314,57,1,0,0,0,315,316,5,33,0,0,316,59,1,0,0,
|
||||
0,317,318,5,126,0,0,318,61,1,0,0,0,319,320,5,42,0,0,320,63,1,0,0,0,321,
|
||||
322,5,47,0,0,322,323,4,31,0,0,323,65,1,0,0,0,324,325,5,37,0,0,325,67,1,
|
||||
0,0,0,326,327,5,43,0,0,327,69,1,0,0,0,328,329,5,45,0,0,329,71,1,0,0,0,330,
|
||||
331,5,60,0,0,331,332,5,60,0,0,332,73,1,0,0,0,333,334,5,62,0,0,334,335,5,
|
||||
62,0,0,335,75,1,0,0,0,336,337,5,62,0,0,337,338,5,62,0,0,338,339,5,62,0,
|
||||
0,339,77,1,0,0,0,340,341,5,60,0,0,341,79,1,0,0,0,342,343,5,60,0,0,343,344,
|
||||
5,61,0,0,344,81,1,0,0,0,345,346,5,62,0,0,346,83,1,0,0,0,347,348,5,62,0,
|
||||
0,348,349,5,61,0,0,349,85,1,0,0,0,350,351,5,61,0,0,351,352,5,61,0,0,352,
|
||||
87,1,0,0,0,353,354,5,61,0,0,354,355,5,61,0,0,355,356,5,61,0,0,356,89,1,
|
||||
0,0,0,357,358,5,33,0,0,358,359,5,61,0,0,359,91,1,0,0,0,360,361,5,33,0,0,
|
||||
361,362,5,61,0,0,362,363,5,61,0,0,363,93,1,0,0,0,364,365,5,38,0,0,365,95,
|
||||
1,0,0,0,366,367,5,94,0,0,367,97,1,0,0,0,368,369,5,124,0,0,369,99,1,0,0,
|
||||
0,370,371,5,38,0,0,371,372,5,38,0,0,372,101,1,0,0,0,373,374,5,124,0,0,374,
|
||||
375,5,124,0,0,375,103,1,0,0,0,376,377,5,63,0,0,377,105,1,0,0,0,378,379,
|
||||
5,58,0,0,379,107,1,0,0,0,380,381,5,63,0,0,381,382,5,58,0,0,382,109,1,0,
|
||||
0,0,383,384,5,58,0,0,384,385,5,58,0,0,385,111,1,0,0,0,386,387,5,45,0,0,
|
||||
387,388,5,62,0,0,388,113,1,0,0,0,389,390,5,61,0,0,390,391,5,126,0,0,391,
|
||||
115,1,0,0,0,392,393,5,61,0,0,393,394,5,61,0,0,394,395,5,126,0,0,395,117,
|
||||
1,0,0,0,396,397,5,43,0,0,397,398,5,43,0,0,398,119,1,0,0,0,399,400,5,45,
|
||||
0,0,400,401,5,45,0,0,401,121,1,0,0,0,402,403,5,61,0,0,403,123,1,0,0,0,404,
|
||||
405,5,43,0,0,405,406,5,61,0,0,406,125,1,0,0,0,407,408,5,45,0,0,408,409,
|
||||
5,61,0,0,409,127,1,0,0,0,410,411,5,42,0,0,411,412,5,61,0,0,412,129,1,0,
|
||||
0,0,413,414,5,47,0,0,414,415,5,61,0,0,415,131,1,0,0,0,416,417,5,37,0,0,
|
||||
417,418,5,61,0,0,418,133,1,0,0,0,419,420,5,38,0,0,420,421,5,61,0,0,421,
|
||||
135,1,0,0,0,422,423,5,94,0,0,423,424,5,61,0,0,424,137,1,0,0,0,425,426,5,
|
||||
124,0,0,426,427,5,61,0,0,427,139,1,0,0,0,428,429,5,60,0,0,429,430,5,60,
|
||||
0,0,430,431,5,61,0,0,431,141,1,0,0,0,432,433,5,62,0,0,433,434,5,62,0,0,
|
||||
434,435,5,61,0,0,435,143,1,0,0,0,436,437,5,62,0,0,437,438,5,62,0,0,438,
|
||||
439,5,62,0,0,439,440,5,61,0,0,440,145,1,0,0,0,441,443,5,48,0,0,442,444,
|
||||
7,2,0,0,443,442,1,0,0,0,444,445,1,0,0,0,445,443,1,0,0,0,445,446,1,0,0,0,
|
||||
446,448,1,0,0,0,447,449,7,3,0,0,448,447,1,0,0,0,448,449,1,0,0,0,449,147,
|
||||
1,0,0,0,450,451,5,48,0,0,451,453,7,4,0,0,452,454,7,5,0,0,453,452,1,0,0,
|
||||
0,454,455,1,0,0,0,455,453,1,0,0,0,455,456,1,0,0,0,456,458,1,0,0,0,457,459,
|
||||
7,3,0,0,458,457,1,0,0,0,458,459,1,0,0,0,459,149,1,0,0,0,460,469,5,48,0,
|
||||
0,461,465,7,6,0,0,462,464,7,7,0,0,463,462,1,0,0,0,464,467,1,0,0,0,465,463,
|
||||
1,0,0,0,465,466,1,0,0,0,466,469,1,0,0,0,467,465,1,0,0,0,468,460,1,0,0,0,
|
||||
468,461,1,0,0,0,469,471,1,0,0,0,470,472,7,8,0,0,471,470,1,0,0,0,471,472,
|
||||
1,0,0,0,472,151,1,0,0,0,473,482,5,48,0,0,474,478,7,6,0,0,475,477,7,7,0,
|
||||
0,476,475,1,0,0,0,477,480,1,0,0,0,478,476,1,0,0,0,478,479,1,0,0,0,479,482,
|
||||
1,0,0,0,480,478,1,0,0,0,481,473,1,0,0,0,481,474,1,0,0,0,482,489,1,0,0,0,
|
||||
483,485,3,20,9,0,484,486,7,7,0,0,485,484,1,0,0,0,486,487,1,0,0,0,487,485,
|
||||
1,0,0,0,487,488,1,0,0,0,488,490,1,0,0,0,489,483,1,0,0,0,489,490,1,0,0,0,
|
||||
490,500,1,0,0,0,491,493,7,9,0,0,492,494,7,10,0,0,493,492,1,0,0,0,493,494,
|
||||
1,0,0,0,494,496,1,0,0,0,495,497,7,7,0,0,496,495,1,0,0,0,497,498,1,0,0,0,
|
||||
498,496,1,0,0,0,498,499,1,0,0,0,499,501,1,0,0,0,500,491,1,0,0,0,500,501,
|
||||
1,0,0,0,501,503,1,0,0,0,502,504,7,11,0,0,503,502,1,0,0,0,503,504,1,0,0,
|
||||
0,504,153,1,0,0,0,505,513,5,34,0,0,506,507,5,92,0,0,507,512,5,34,0,0,508,
|
||||
509,5,92,0,0,509,512,5,92,0,0,510,512,8,12,0,0,511,506,1,0,0,0,511,508,
|
||||
1,0,0,0,511,510,1,0,0,0,512,515,1,0,0,0,513,514,1,0,0,0,513,511,1,0,0,0,
|
||||
514,516,1,0,0,0,515,513,1,0,0,0,516,530,5,34,0,0,517,525,5,39,0,0,518,519,
|
||||
5,92,0,0,519,524,5,39,0,0,520,521,5,92,0,0,521,524,5,92,0,0,522,524,8,13,
|
||||
0,0,523,518,1,0,0,0,523,520,1,0,0,0,523,522,1,0,0,0,524,527,1,0,0,0,525,
|
||||
526,1,0,0,0,525,523,1,0,0,0,526,528,1,0,0,0,527,525,1,0,0,0,528,530,5,39,
|
||||
0,0,529,505,1,0,0,0,529,517,1,0,0,0,530,155,1,0,0,0,531,535,5,47,0,0,532,
|
||||
533,5,92,0,0,533,536,8,14,0,0,534,536,8,15,0,0,535,532,1,0,0,0,535,534,
|
||||
1,0,0,0,536,537,1,0,0,0,537,538,1,0,0,0,537,535,1,0,0,0,538,539,1,0,0,0,
|
||||
539,543,5,47,0,0,540,542,7,16,0,0,541,540,1,0,0,0,542,545,1,0,0,0,543,541,
|
||||
1,0,0,0,543,544,1,0,0,0,544,546,1,0,0,0,545,543,1,0,0,0,546,547,4,77,1,
|
||||
0,547,157,1,0,0,0,548,549,5,116,0,0,549,550,5,114,0,0,550,551,5,117,0,0,
|
||||
551,552,5,101,0,0,552,159,1,0,0,0,553,554,5,102,0,0,554,555,5,97,0,0,555,
|
||||
556,5,108,0,0,556,557,5,115,0,0,557,558,5,101,0,0,558,161,1,0,0,0,559,560,
|
||||
5,110,0,0,560,561,5,117,0,0,561,562,5,108,0,0,562,563,5,108,0,0,563,163,
|
||||
1,0,0,0,564,565,5,98,0,0,565,566,5,111,0,0,566,567,5,111,0,0,567,568,5,
|
||||
108,0,0,568,569,5,101,0,0,569,570,5,97,0,0,570,603,5,110,0,0,571,572,5,
|
||||
98,0,0,572,573,5,121,0,0,573,574,5,116,0,0,574,603,5,101,0,0,575,576,5,
|
||||
115,0,0,576,577,5,104,0,0,577,578,5,111,0,0,578,579,5,114,0,0,579,603,5,
|
||||
116,0,0,580,581,5,99,0,0,581,582,5,104,0,0,582,583,5,97,0,0,583,603,5,114,
|
||||
0,0,584,585,5,105,0,0,585,586,5,110,0,0,586,603,5,116,0,0,587,588,5,108,
|
||||
0,0,588,589,5,111,0,0,589,590,5,110,0,0,590,603,5,103,0,0,591,592,5,102,
|
||||
0,0,592,593,5,108,0,0,593,594,5,111,0,0,594,595,5,97,0,0,595,603,5,116,
|
||||
0,0,596,597,5,100,0,0,597,598,5,111,0,0,598,599,5,117,0,0,599,600,5,98,
|
||||
0,0,600,601,5,108,0,0,601,603,5,101,0,0,602,564,1,0,0,0,602,571,1,0,0,0,
|
||||
602,575,1,0,0,0,602,580,1,0,0,0,602,584,1,0,0,0,602,587,1,0,0,0,602,591,
|
||||
1,0,0,0,602,596,1,0,0,0,603,165,1,0,0,0,604,605,5,100,0,0,605,606,5,101,
|
||||
0,0,606,607,5,102,0,0,607,167,1,0,0,0,608,612,7,17,0,0,609,611,7,18,0,0,
|
||||
610,609,1,0,0,0,611,614,1,0,0,0,612,610,1,0,0,0,612,613,1,0,0,0,613,169,
|
||||
1,0,0,0,614,612,1,0,0,0,615,624,5,48,0,0,616,620,7,6,0,0,617,619,7,7,0,
|
||||
0,618,617,1,0,0,0,619,622,1,0,0,0,620,618,1,0,0,0,620,621,1,0,0,0,621,624,
|
||||
1,0,0,0,622,620,1,0,0,0,623,615,1,0,0,0,623,616,1,0,0,0,624,625,1,0,0,0,
|
||||
625,626,6,84,2,0,626,171,1,0,0,0,627,631,7,17,0,0,628,630,7,18,0,0,629,
|
||||
628,1,0,0,0,630,633,1,0,0,0,631,629,1,0,0,0,631,632,1,0,0,0,632,634,1,0,
|
||||
0,0,633,631,1,0,0,0,634,635,6,85,2,0,635,173,1,0,0,0,34,0,1,177,187,197,
|
||||
202,445,448,455,458,465,468,471,478,481,487,489,493,498,500,503,511,513,
|
||||
523,525,529,535,537,543,602,612,620,623,631,3,6,0,0,2,1,0,2,0,0];
private static __ATN: ATN;
public static get _ATN(): ATN {
if (!painless_lexer.__ATN) {
painless_lexer.__ATN = new ATNDeserializer().deserialize(Utils.toCharArray(painless_lexer._serializedATN));
painless_lexer.__ATN = new ATNDeserializer().deserialize(painless_lexer._serializedATN);
}
return painless_lexer.__ATN;
}
}
static DecisionsToDFA = painless_lexer._ATN.decisionToState.map( (ds: DecisionState, index: number) => new DFA(ds, index) );
}
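The regenerated lexer above shows the main mechanical difference between the two runtimes: antlr4ts stored the ATN as escaped string segments that were joined and passed through `Utils.toCharArray`, while the official antlr4 runtime emits a plain `number[]` that `ATNDeserializer` consumes directly. As a rough illustration of how such a regenerated lexer is consumed, here is a minimal sketch (not taken from this PR; the import path and sample input are placeholders) using the `CharStreams` API that appears later in this diff:

```ts
import { CharStreams, Token } from 'antlr4';
// Hypothetical path; in Kibana the generated lexer lives under the package's antlr/ folder.
import { default as PainlessLexer } from './antlr/painless_lexer';

const input = CharStreams.fromString('int a = 1 + 2;');
const lexer = new PainlessLexer(input);

// Pull tokens one at a time until the lexer reports end-of-file.
for (let token = lexer.nextToken(); token.type !== Token.EOF; token = lexer.nextToken()) {
  console.log(token.type, token.text);
}
```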
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -6,8 +6,8 @@
  * Side Public License, v 1.
  */

-import type { Token } from 'antlr4ts';
-import { painless_lexer as PainlessLexer } from '../../antlr/painless_lexer';
+import type { Token } from 'antlr4';
+import { default as PainlessLexer } from '../../antlr/painless_lexer';

 /*
  * This extends the PainlessLexer class in order to handle backslashes appropriately
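The wrapper in this hunk keeps the same approach under the new runtime: `Token` becomes a type-only import from 'antlr4' and the generated lexer is pulled in through its default export instead of a named export. Below is a minimal sketch of what such a subclass can look like; the `lastToken` bookkeeping and the `previousToken` helper are assumptions for illustration, not the exact Kibana implementation:

```ts
import type { Token } from 'antlr4';
import { default as PainlessLexer } from '../../antlr/painless_lexer';

export class PainlessLexerEnhanced extends PainlessLexer {
  private lastToken?: Token;

  // Remember the most recently emitted token so later checks (for example,
  // deciding whether a leading `/` opens a regex or is a division operator)
  // can look back at it.
  override nextToken(): Token {
    const token = super.nextToken();
    this.lastToken = token;
    return token;
  }

  // Hypothetical helper: expose the previous token for that disambiguation.
  previousToken(): Token | undefined {
    return this.lastToken;
  }
}
```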
@@ -6,11 +6,11 @@
  * Side Public License, v 1.
  */

-import { CommonTokenStream, CharStreams } from 'antlr4ts';
-import { painless_parser as PainlessParser, SourceContext } from '../../antlr/painless_parser';
+import { CommonTokenStream, CharStreams } from 'antlr4';
+import { default as PainlessParser, SourceContext } from '../../antlr/painless_parser';
 import { PainlessLexerEnhanced } from './lexer';
 import { EditorError } from '../../../types';
-import { ANTLREErrorListener } from '../../../common/error_listener';
+import { ANTLRErrorListener } from '../../../common/error_listener';

 const parse = (
   code: string
@@ -20,7 +20,7 @@ const parse = (
 } => {
   const inputStream = CharStreams.fromString(code);
   const lexer = new PainlessLexerEnhanced(inputStream);
-  const painlessLangErrorListener = new ANTLREErrorListener();
+  const painlessLangErrorListener = new ANTLRErrorListener();
   const tokenStream = new CommonTokenStream(lexer);
   const parser = new PainlessParser(tokenStream);
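Putting the pieces from these two hunks together, the migrated `parse` helper keeps the same shape it had under antlr4ts; only the imports change. The sketch below assembles the parts visible in the diff; the error-listener wiring and the `getErrors()` accessor on `ANTLRErrorListener` are assumptions, since those lines are not shown in this excerpt:

```ts
import { CharStreams, CommonTokenStream } from 'antlr4';
import { default as PainlessParser, SourceContext } from '../../antlr/painless_parser';
import { PainlessLexerEnhanced } from './lexer';
import { EditorError } from '../../../types';
import { ANTLRErrorListener } from '../../../common/error_listener';

const parse = (
  code: string
): {
  source: SourceContext;
  errors: EditorError[];
} => {
  const inputStream = CharStreams.fromString(code);
  const lexer = new PainlessLexerEnhanced(inputStream);
  const painlessLangErrorListener = new ANTLRErrorListener();
  const tokenStream = new CommonTokenStream(lexer);
  const parser = new PainlessParser(tokenStream);

  // Assumed wiring: route both lexer and parser errors through the shared listener.
  lexer.removeErrorListeners();
  lexer.addErrorListener(painlessLangErrorListener);
  parser.removeErrorListeners();
  parser.addErrorListener(painlessLangErrorListener);

  return {
    source: parser.source(), // `source` is the grammar's entry rule, hence the SourceContext import.
    errors: painlessLangErrorListener.getErrors(), // assumed accessor on Kibana's listener
  };
};
```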
yarn.lock
@@ -11422,15 +11422,10 @@ ansi-wrap@0.1.0, ansi-wrap@^0.1.0:
   resolved "https://registry.yarnpkg.com/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf"
   integrity sha1-qCJQ3bABXponyoLoLqYDu/pF768=

-antlr4ts-cli@^0.5.0-alpha.3:
-  version "0.5.0-alpha.3"
-  resolved "https://registry.yarnpkg.com/antlr4ts-cli/-/antlr4ts-cli-0.5.0-alpha.3.tgz#1f581b2a3c840d3921a2f3b1e739e48c7e7c18cd"
-  integrity sha512-i6oyxfaXU6qnw4HgyeSIsOLlsvT7zU3vmenoJKFNVFP1QNodtJMZYpnyxc8TmOFpJs7fEoWanLavSSDEmcCZBQ==
-
-antlr4ts@^0.5.0-alpha.3:
-  version "0.5.0-alpha.3"
-  resolved "https://registry.yarnpkg.com/antlr4ts/-/antlr4ts-0.5.0-alpha.3.tgz#fa6d39d88d6b96341a8afef45867af9abcb38766"
-  integrity sha512-La89tKkGcHFIVuruv4Bm1esc3zLmES2NOTEwwNS1pudz+zx/0FNqQeUu9p48i9/QHKPVqjN87LB+q3buTg7oDQ==
+antlr4@^4.13.1-patch-1:
+  version "4.13.1-patch-1"
+  resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.13.1-patch-1.tgz#946176f863f890964a050c4f18c47fd6f7e57602"
+  integrity sha512-OjFLWWLzDMV9rdFhpvroCWR4ooktNg9/nvVYSA5z28wuVpU36QUNuioR1XLnQtcjVlf8npjyz593PxnU/f/Cow==

 any-observable@^0.3.0:
   version "0.3.0"