mirror of
https://github.com/elastic/kibana.git
synced 2025-06-28 03:01:21 -04:00
[ES|QL] pull in multi-file grammars (#212430)
## Summary Resolves https://github.com/elastic/kibana/issues/210518. Successful grammar update PR: https://github.com/elastic/kibana/pull/212442
This commit is contained in:
parent
443fd5ba35
commit
2435a17cfe
4 changed files with 19 additions and 189 deletions
|
@ -4,10 +4,13 @@ set -euo pipefail
|
|||
synchronize_lexer_grammar () {
|
||||
license_header="$1"
|
||||
source_file="$PARENT_DIR/elasticsearch/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4"
|
||||
source_lib_dir="$PARENT_DIR/elasticsearch/x-pack/plugin/esql/src/main/antlr/lexer"
|
||||
destination_file="./src/platform/packages/shared/kbn-esql-ast/src/antlr/esql_lexer.g4"
|
||||
destination_lib_dir="./src/platform/packages/shared/kbn-esql-ast/src/antlr/lexer"
|
||||
|
||||
# Copy the file
|
||||
cp "$source_file" "$destination_file"
|
||||
cp -r "$source_lib_dir" "$destination_lib_dir"
|
||||
|
||||
# Insert the license header
|
||||
temp_file=$(mktemp)
|
||||
|
@ -26,10 +29,13 @@ synchronize_lexer_grammar () {
|
|||
synchronize_parser_grammar () {
|
||||
license_header="$1"
|
||||
source_file="$PARENT_DIR/elasticsearch/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4"
|
||||
source_lib_dir="$PARENT_DIR/elasticsearch/x-pack/plugin/esql/src/main/antlr/parser"
|
||||
destination_file="./src/platform/packages/shared/kbn-esql-ast/src/antlr/esql_parser.g4"
|
||||
destination_lib_dir="./src/platform/packages/shared/kbn-esql-ast/src/antlr/parser"
|
||||
|
||||
# Copy the file
|
||||
cp "$source_file" "$destination_file"
|
||||
cp -r "$source_lib_dir" "$destination_lib_dir"
|
||||
|
||||
# Insert the license header
|
||||
temp_file=$(mktemp)
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
{
|
||||
"name": "@kbn/esql-ast",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"license": "Elastic License 2.0 OR AGPL-3.0-only OR SSPL-1.0",
|
||||
"scripts": {
|
||||
"build:antlr4:esql": "antlr -Dlanguage=TypeScript src/antlr/esql_lexer.g4 src/antlr/esql_parser.g4 && node ./scripts/fix_generated_antlr.js && node ./scripts/esql_update_ast_script.js",
|
||||
"prebuild:antlr4": "brew bundle --file=./scripts/antlr4_tools/brewfile",
|
||||
"build:antlr4": "npm run build:antlr4:esql"
|
||||
},
|
||||
"sideEffects": false
|
||||
}
|
||||
"name": "@kbn/esql-ast",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"license": "Elastic License 2.0 OR AGPL-3.0-only OR SSPL-1.0",
|
||||
"scripts": {
|
||||
"build:antlr4:esql:parser": "antlr -Dlanguage=TypeScript -lib src/antlr/parser src/antlr/esql_parser.g4",
|
||||
"build:antlr4:esql:lexer": "antlr -Dlanguage=TypeScript -lib src/antlr/lexer src/antlr/esql_lexer.g4",
|
||||
"build:antlr4:esql": "npm run build:antlr4:esql:parser && npm run build:antlr4:esql:lexer",
|
||||
"prebuild:antlr4": "brew bundle --file=./scripts/antlr4_tools/brewfile",
|
||||
"build:antlr4": "npm run build:antlr4:esql"
|
||||
},
|
||||
"sideEffects": false
|
||||
}
|
||||
|
|
|
@ -1,117 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
const { join } = require('path');
const { ESLint } = require('eslint');
const partition = require('lodash/partition');
const { readdirSync, readFileSync, writeFileSync } = require('fs');
const ora = require('ora');

const log = ora('Updating ES|QL AST walker from antlr grammar').start();

/**
 * Regenerates the `getQuotedText` / `getUnquotedText` helpers in
 * `src/ast_helpers.ts` from the token constants found in the generated
 * ANTLR parser file, then lints/formats the result with ESLint before
 * writing it back.
 */
async function execute() {
  const generatedAntlrFolder = join(__dirname, '..', 'src', 'antlr');

  const generatedAntlrFolderContents = readdirSync(generatedAntlrFolder);

  // Token declarations in the generated file look like:
  //   public static readonly UNQUOTED_IDENTIFIER = 123;
  const tokenRegex = /public static readonly (?<name>[A-Z_]*(UN)*QUOTED_[A-Z_]+) = (?<value>\d+);/;

  // The token constants are read from the generated parser file.
  const parserFile = generatedAntlrFolderContents.find((file) => file === 'esql_parser.ts');
  if (!parserFile) {
    // Fail with a clear message instead of a confusing TypeError from join(..., undefined).
    process.exitCode = 1;
    return log.fail(`Could not find esql_parser.ts in ${generatedAntlrFolder}`);
  }
  const parserFileRows = readFileSync(join(generatedAntlrFolder, parserFile), 'utf8').split('\n');

  // Collect every { name, value } token pair declared in the parser file.
  const tokenList = [];
  for (const row of parserFileRows) {
    const match = row.match(tokenRegex);
    if (match?.groups) {
      tokenList.push(match.groups);
    }
  }
  // Split into UNQUOTED_* vs QUOTED_* tokens.
  const [unquotedList, quotedList] = partition(tokenList, ({ name }) => /UNQUOTED/.test(name));

  // Now all quoted/unquoted tokens are registered;
  // dump them into the ast_helpers file between the script markers.
  const astHelperFileFolder = join(__dirname, '..', 'src');
  const astHelperFilename = 'ast_helpers.ts';

  try {
    const astHelperContentRows = readFileSync(
      join(astHelperFileFolder, astHelperFilename),
      'utf8'
    ).split('\n');

    const startAutoGeneratedComment = astHelperContentRows.findIndex(
      (row) => row === '/* SCRIPT_MARKER_START */'
    );
    const endMarkerIndex = astHelperContentRows.findIndex(
      (row) => row === '/* SCRIPT_MARKER_END */'
    );
    if (startAutoGeneratedComment === -1 || endMarkerIndex === -1) {
      // Without both markers the splice below would silently corrupt the file.
      process.exitCode = 1;
      return log.fail(`Could not find SCRIPT_MARKER comments in ${astHelperFilename}`);
    }
    const endAutoGeneratedComment = endMarkerIndex + 1;

    const newFunctionsContent = `
/* SCRIPT_MARKER_START */
function getQuotedText(ctx: ParserRuleContext) {
  return [
    ${quotedList.map(({ name, value }) => `${value} /* esql_parser.${name} */`).join(', ')}
  ]
    .map((keyCode) => ctx.getToken(keyCode, 0))
    .filter(nonNullable)[0];
}

function getUnquotedText(ctx: ParserRuleContext) {
  return [
    ${unquotedList.map(({ name, value }) => `${value} /* esql_parser.${name} */`).join(', ')}
  ]
    .map((keyCode) => ctx.getToken(keyCode, 0))
    .filter(nonNullable)[0];
}
/* SCRIPT_MARKER_END */
`;

    // Replace the previously generated span with the fresh content.
    const fileContent = astHelperContentRows
      .slice(0, startAutoGeneratedComment)
      .concat(newFunctionsContent.split('\n'), astHelperContentRows.slice(endAutoGeneratedComment));

    const fileContentString = fileContent.join('\n');

    // Lint + prettier-format the regenerated file in-memory.
    const eslint = new ESLint({
      fix: true,
      overrideConfig: {
        parser: '@typescript-eslint/parser',
        parserOptions: {
          sourceType: 'module',
          ecmaVersion: 2018,
        },
        rules: {
          '@kbn/imports/no_unresolvable_imports': 'off',
          'prettier/prettier': [
            'error',
            {
              parser: 'typescript',
            },
          ],
        },
      },
    });

    const results = await eslint.lintText(fileContentString);

    if (results.some(({ messages }) => messages.length > 0)) {
      const formatter = await eslint.loadFormatter('stylish');
      // format() may return a Promise in newer ESLint versions; await handles both.
      const resultText = await formatter.format(results);
      process.exitCode = 1;
      return log.fail(resultText);
    }

    const filePath = join(astHelperFileFolder, astHelperFilename);
    writeFileSync(filePath, results[0].output || '', { encoding: 'utf8' });
  } catch (err) {
    return log.fail(err.message);
  }

  log.succeed('Updated ES|QL helper from antlr grammar successfully');
}

execute();
|
|
@ -1,61 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the "Elastic License
|
||||
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
|
||||
* Public License v 1"; you may not use this file except in compliance with, at
|
||||
* your election, the "Elastic License 2.0", the "GNU Affero General Public
|
||||
* License v3.0 only", or the "Server Side Public License, v 1".
|
||||
*/
|
||||
|
||||
const { join } = require('path');
const { readdirSync, readFileSync, writeFileSync, renameSync } = require('fs');
const ora = require('ora');

const log = ora('Updating generated antlr grammar').start();

/**
 * Post-processes the TypeScript files generated by antlr4:
 *  - prepends a `// @ts-nocheck` banner to every generated .ts file so the
 *    linting/type errors the generator produces can be ignored for now;
 *  - renames the generated parserListener file to snake_case to satisfy
 *    the repository's file-casing check.
 */
function execute() {
  const generatedAntlrFolder = join(__dirname, '..', 'src', 'antlr');

  const generatedAntlrFolderContents = readdirSync(generatedAntlrFolder);

  // The generated TS produces some TS linting errors.
  // This adds a // @ts-nocheck comment at the top of each generated .ts file
  // so that the errors can be ignored for now.
  generatedAntlrFolderContents
    // endsWith('.ts') avoids a crash on extension-less files
    // (split('.')[1] is undefined there) and no longer matches
    // unrelated extensions that merely contain the letters "ts".
    .filter((file) => file.endsWith('.ts'))
    .forEach((file) => {
      try {
        const filePath = join(generatedAntlrFolder, file);
        const fileContentRows = readFileSync(filePath, 'utf8').split('\n');

        // Only add the banner once so regeneration stays idempotent.
        if (!/@ts-nocheck/.test(fileContentRows[0])) {
          fileContentRows.unshift('// @ts-nocheck');
        }

        writeFileSync(filePath, fileContentRows.join('\n'), { encoding: 'utf8' });
      } catch (err) {
        // Report the failure but keep processing the remaining files;
        // a `return` here would only exit this forEach callback anyway.
        log.fail(err.message);
      }
    });

  // Rename generated parserListener file to snake_case to satisfy the file
  // casing check. There doesn't appear to be a way to fix this OOTB with antlr4.
  try {
    renameSync(
      join(generatedAntlrFolder, 'esql_parserListener.ts'),
      join(generatedAntlrFolder, 'esql_parser_listener.ts')
    );
  } catch (err) {
    log.warn(`Unable to rename parserListener file to snake-case: ${err.message}`);
  }

  log.succeed('Updated generated antlr grammar successfully');
}

execute();
|
Loading…
Add table
Add a link
Reference in a new issue