[ts] enable sourcemaps in summarized types of @kbn/crypto (#126410)

* [ts] enable sourcemaps in summarized types of @kbn/crypto

* update snapshots

* remove unnecessary exports of @kbn/type-summarizer package

* remove tsc from the build process

* use `@kbn/type-summarizer` to summarize its own types

* add tests for interface and function

* switch to export type where necessary

* ignore __tmp__ in global jest preset

* ignore __tmp__ globally

* remove `@kbn/crypto` types path
This commit is contained in:
Spencer 2022-03-04 08:02:02 -06:00 committed by GitHub
parent 1174ac0cfc
commit eb68e95acd
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
62 changed files with 2783 additions and 276 deletions

View file

@ -1,4 +1,5 @@
**/*.js.snap
__tmp__
/.es
/.chromium
/build

View file

@ -104,6 +104,7 @@ const DEV_PACKAGES = [
'kbn-storybook',
'kbn-telemetry-tools',
'kbn-test',
'kbn-type-summarizer',
];
/** Directories (at any depth) which include dev-only code. */
@ -1632,28 +1633,6 @@ module.exports = {
},
},
/**
* Prettier disables all conflicting rules, listing as last override so it takes precedence
*/
{
files: ['**/*'],
rules: {
...require('eslint-config-prettier').rules,
...require('eslint-config-prettier/react').rules,
...require('eslint-config-prettier/@typescript-eslint').rules,
},
},
/**
* Enterprise Search Prettier override
* Lints unnecessary backticks - @see https://github.com/prettier/eslint-config-prettier/blob/main/README.md#forbid-unnecessary-backticks
*/
{
files: ['x-pack/plugins/enterprise_search/**/*.{ts,tsx}'],
rules: {
quotes: ['error', 'single', { avoidEscape: true, allowTemplateLiterals: false }],
},
},
/**
* Platform Security Team overrides
*/
@ -1768,5 +1747,34 @@ module.exports = {
'@kbn/eslint/no_export_all': 'error',
},
},
{
files: ['packages/kbn-type-summarizer/**/*.ts'],
rules: {
'no-bitwise': 'off',
},
},
/**
* Prettier disables all conflicting rules, listing as last override so it takes precedence
*/
{
files: ['**/*'],
rules: {
...require('eslint-config-prettier').rules,
...require('eslint-config-prettier/react').rules,
...require('eslint-config-prettier/@typescript-eslint').rules,
},
},
/**
* Enterprise Search Prettier override
* Lints unnecessary backticks - @see https://github.com/prettier/eslint-config-prettier/blob/main/README.md#forbid-unnecessary-backticks
*/
{
files: ['x-pack/plugins/enterprise_search/**/*.{ts,tsx}'],
rules: {
quotes: ['error', 'single', { avoidEscape: true, allowTemplateLiterals: false }],
},
},
],
};

1
.gitignore vendored
View file

@ -20,6 +20,7 @@ target
*.iml
*.log
types.eslint.config.js
__tmp__
# Ignore example plugin builds
/examples/*/build

View file

@ -479,6 +479,7 @@
"@kbn/test": "link:bazel-bin/packages/kbn-test",
"@kbn/test-jest-helpers": "link:bazel-bin/packages/kbn-test-jest-helpers",
"@kbn/test-subj-selector": "link:bazel-bin/packages/kbn-test-subj-selector",
"@kbn/type-summarizer": "link:bazel-bin/packages/kbn-type-summarizer",
"@loaders.gl/polyfills": "^2.3.5",
"@mapbox/vector-tile": "1.3.1",
"@microsoft/api-documenter": "7.13.68",
@ -869,6 +870,7 @@
"simple-git": "1.116.0",
"sinon": "^7.4.2",
"sort-package-json": "^1.53.1",
"source-map": "^0.7.3",
"spawn-sync": "^1.0.15",
"string-replace-loader": "^2.2.0",
"strong-log-transformer": "^2.1.0",

View file

@ -66,6 +66,7 @@ filegroup(
"//packages/kbn-test-subj-selector:build",
"//packages/kbn-timelion-grammar:build",
"//packages/kbn-tinymath:build",
"//packages/kbn-type-summarizer:build",
"//packages/kbn-typed-react-router-config:build",
"//packages/kbn-ui-framework:build",
"//packages/kbn-ui-shared-deps-npm:build",
@ -132,6 +133,7 @@ filegroup(
"//packages/kbn-telemetry-tools:build_types",
"//packages/kbn-test:build_types",
"//packages/kbn-test-jest-helpers:build_types",
"//packages/kbn-type-summarizer:build_types",
"//packages/kbn-typed-react-router-config:build_types",
"//packages/kbn-ui-shared-deps-npm:build_types",
"//packages/kbn-ui-shared-deps-src:build_types",

View file

@ -61,6 +61,7 @@ ts_project(
srcs = SRCS,
deps = TYPES_DEPS,
declaration = True,
declaration_map = True,
emit_declaration_only = True,
out_dir = "target_types",
root_dir = "src",

View file

@ -2,6 +2,7 @@
"extends": "../../tsconfig.bazel.json",
"compilerOptions": {
"declaration": true,
"declarationMap": true,
"emitDeclarationOnly": true,
"outDir": "./target_types",
"rootDir": "src",

View file

@ -9,6 +9,8 @@
// For a detailed explanation regarding each configuration property, visit:
// https://jestjs.io/docs/en/configuration.html
/** @typedef {import("@jest/types").Config.InitialOptions} JestConfig */
/** @type {JestConfig} */
module.exports = {
// The directory where Jest should output its coverage files
coverageDirectory: '<rootDir>/target/kibana-coverage/jest',
@ -128,4 +130,6 @@ module.exports = {
// A custom resolver to preserve symlinks by default
resolver: '<rootDir>/node_modules/@kbn/test/target_node/jest/setup/preserve_symlinks_resolver.js',
watchPathIgnorePatterns: ['.*/__tmp__/.*'],
};

View file

@ -0,0 +1,136 @@
load("@npm//@bazel/typescript:index.bzl", "ts_config")
load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
load("@build_bazel_rules_nodejs//internal/node:node.bzl", "nodejs_binary")
load("@build_bazel_rules_nodejs//:index.bzl", "directory_file_path")

PKG_BASE_NAME = "kbn-type-summarizer"
PKG_REQUIRE_NAME = "@kbn/type-summarizer"

# All TypeScript sources; test files are excluded from the built package.
SOURCE_FILES = glob(
  [
    "src/**/*.ts",
  ],
  exclude = [
    "**/*.test.*"
  ],
)

SRCS = SOURCE_FILES

filegroup(
  name = "srcs",
  srcs = SRCS,
)

# Non-source files shipped with the npm module.
NPM_MODULE_EXTRA_FILES = [
  "package.json",
  "README.md",
]

# Packages needed when the compiled JS actually runs.
RUNTIME_DEPS = [
  "@npm//@babel/runtime",
  "@npm//@microsoft/api-extractor",
  "@npm//source-map-support",
  "@npm//chalk",
  "@npm//getopts",
  "@npm//is-path-inside",
  "@npm//normalize-path",
  "@npm//source-map",
  "@npm//tslib",
]

# Packages needed to type-check the sources with tsc.
TYPES_DEPS = [
  "@npm//@microsoft/api-extractor",
  "@npm//@types/jest",
  "@npm//@types/node",
  "@npm//@types/normalize-path",
  "@npm//getopts",
  "@npm//is-path-inside",
  "@npm//normalize-path",
  "@npm//source-map",
  "@npm//tslib",
]

ts_config(
  name = "tsconfig",
  src = "tsconfig.json",
  deps = [
    "//:tsconfig.base.json",
    "//:tsconfig.bazel.json",
  ],
)

# Emits only .d.ts (+ .d.ts.map, via declaration_map) into target_types.
ts_project(
  name = "tsc_types",
  args = ['--pretty'],
  srcs = SRCS,
  deps = TYPES_DEPS,
  declaration = True,
  declaration_map = True,
  emit_declaration_only = True,
  out_dir = "target_types",
  root_dir = "src",
  tsconfig = ":tsconfig",
)

# Transpiles the runtime JS (no type checking) into target_node.
jsts_transpiler(
  name = "target_node",
  srcs = SRCS,
  build_pkg_name = package_name(),
)

js_library(
  name = PKG_BASE_NAME,
  srcs = NPM_MODULE_EXTRA_FILES,
  deps = RUNTIME_DEPS + [":target_node"],
  package_name = PKG_REQUIRE_NAME,
  visibility = ["//visibility:public"],
)

# Entry point file for the bazel-invoked CLI binary below.
directory_file_path(
  name = "bazel-cli-path",
  directory = ":target_node",
  path = "bazel_cli.js",
)

# Binary other packages invoke (via pkg_npm_types) to summarize their types.
nodejs_binary(
  name = "bazel-cli",
  data = [
    ":%s" % PKG_BASE_NAME
  ],
  entry_point = ":bazel-cli-path",
  visibility = ["//visibility:public"],
)

pkg_npm(
  name = "npm_module",
  deps = [
    ":%s" % PKG_BASE_NAME,
  ],
)

filegroup(
  name = "build",
  srcs = [
    ":npm_module",
  ],
  visibility = ["//visibility:public"],
)

# This package summarizes its own types (see TYPE_SUMMARIZER_PACKAGES).
pkg_npm_types(
  name = "npm_module_types",
  srcs = SRCS,
  deps = [":tsc_types"],
  package_name = PKG_REQUIRE_NAME,
  tsconfig = ":tsconfig",
  visibility = ["//visibility:public"],
)

filegroup(
  name = "build_types",
  srcs = [
    ":npm_module_types",
  ],
  visibility = ["//visibility:public"],
)

View file

@ -0,0 +1,17 @@
# @kbn/type-summarizer
Consume the .d.ts files for a package, produced by `tsc`, and generate a single `.d.ts` file of the public types along with a source map that points back to the original source.
## You mean like API Extractor?
Yeah, except with source map support and without all the legacy features and other features we disable to generate our current type summaries.
I first attempted to implement this in api-extractor but I (@spalger) hit a wall when dealing with the `Span` class. This class handles all the text output which ends up becoming source code, and I wasn't able to find a way to associate specific spans with source locations without getting 12 headaches. Instead I decided to try implementing this from scratch, reducing our reliance on the api-extractor project and putting us in control of how we generate type summaries.
This package is missing some critical features for wider adoption, but rather than build the entire product in a branch I decided to implement support for a small number of TS features and put this to use in the `@kbn/crypto` module ASAP.
The plan is to expand to other packages in the Kibana repo, adding support for language features as we go.
## Something isn't working and I'm blocked!
If there's a problem with the implementation blocking another team at any point we can move the package back to using api-extractor by removing the package from the `TYPE_SUMMARIZER_PACKAGES` list at the top of [packages/kbn-type-summarizer/src/lib/bazel_cli_config.ts](./src/lib/bazel_cli_config.ts).

View file

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/** @typedef {import("@jest/types").Config.InitialOptions} JestConfig */
/** @type {JestConfig} */
// Unit-test config for @kbn/type-summarizer. `rootDir` points at the repo
// root (two levels up) so the shared preset resolves; `roots` restricts test
// discovery to this package.
module.exports = {
  preset: '@kbn/test/jest_node',
  rootDir: '../..',
  roots: ['<rootDir>/packages/kbn-type-summarizer'],
};

View file

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/** @typedef {import("@jest/types").Config.InitialOptions} JestConfig */
/** @type {JestConfig} */
// Integration-test config for @kbn/type-summarizer; identical to the unit
// config except it uses the node integration preset.
module.exports = {
  preset: '@kbn/test/jest_integration_node',
  rootDir: '../..',
  roots: ['<rootDir>/packages/kbn-type-summarizer'],
};

View file

@ -0,0 +1,7 @@
{
"name": "@kbn/type-summarizer",
"version": "1.0.0",
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target_node/index.js",
"private": true
}

View file

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Fsp from 'fs/promises';
import Path from 'path';
import { run } from './lib/run';
import { parseBazelCliConfig } from './lib/bazel_cli_config';
import { summarizePackage } from './summarize_package';
import { runApiExtractor } from './run_api_extractor';
const HELP = `
Script called from bazel to create the summarized version of a package. When called by Bazel
config is passed as a JSON encoded object.
When called via "node scripts/build_type_summarizer_output" pass a path to a package and that
package's types will be read from node_modules and written to data/type-summarizer-output.
`;

// Entry point for the Bazel `pkg_npm_types` rule (and the manual script
// described in HELP). Parses the CLI config, writes the generated @types
// package.json, then produces the type summary — either with this package's
// summarizer or with api-extractor for packages not yet migrated.
run(
  async ({ argv, log }) => {
    log.debug('cwd:', process.cwd());
    log.debug('argv', process.argv);
    const config = parseBazelCliConfig(argv);
    // make sure the output directory exists before writing into it
    await Fsp.mkdir(config.outputDir, { recursive: true });
    // generate pkg json output
    await Fsp.writeFile(
      Path.resolve(config.outputDir, 'package.json'),
      JSON.stringify(
        {
          // e.g. "@kbn/crypto" becomes "@types/kbn__crypto"
          name: `@types/${config.packageName.replaceAll('@', '').replaceAll('/', '__')}`,
          description: 'Generated by @kbn/type-summarizer',
          types: './index.d.ts',
          private: true,
          license: 'MIT',
          version: '1.1.0',
        },
        null,
        2
      )
    );
    if (config.use === 'type-summarizer') {
      await summarizePackage(log, {
        dtsDir: Path.dirname(config.inputPath),
        inputPaths: [config.inputPath],
        outputDir: config.outputDir,
        tsconfigPath: config.tsconfigPath,
        repoRelativePackageDir: config.repoRelativePackageDir,
      });
      log.success('type summary created for', config.repoRelativePackageDir);
    } else {
      // fallback path for packages still summarized by api-extractor
      await runApiExtractor(
        config.tsconfigPath,
        config.inputPath,
        Path.resolve(config.outputDir, 'index.d.ts')
      );
    }
  },
  {
    helpText: HELP,
    defaultLogLevel: 'quiet',
  }
);

View file

@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// Public API of @kbn/type-summarizer.
// NOTE(review): "SummarizePacakgeOptions" is misspelled at its declaration
// site; it is re-exported as-is so existing importers keep working.
export type { Logger } from './lib/log';
export type { SummarizePacakgeOptions } from './summarize_package';
export { summarizePackage } from './summarize_package';

View file

@ -0,0 +1,151 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Path from 'path';
import Fs from 'fs';
import { CliError } from './cli_error';
import { parseCliFlags } from './cli_flags';
// Packages summarized with this package's own summarizer; every other package
// falls back to api-extractor (see the `use` field computed below).
const TYPE_SUMMARIZER_PACKAGES = ['@kbn/type-summarizer', '@kbn/crypto'];

// narrow an arbitrary value to a non-empty string
const isString = (i: any): i is string => typeof i === 'string' && i.length > 0;

/** Fully-resolved configuration for one summarization run. */
interface BazelCliConfig {
  // name from the package's package.json, e.g. "@kbn/crypto"
  packageName: string;
  // absolute path the summary output is written to
  outputDir: string;
  // absolute path to the package's tsconfig.json
  tsconfigPath: string;
  // absolute path to the tsc-produced .d.ts entry point
  inputPath: string;
  // package directory relative to the repo root
  repoRelativePackageDir: string;
  // which engine produces the summary
  use: 'api-extractor' | 'type-summarizer';
}
/**
 * Build a BazelCliConfig from a flags-style invocation (the manual
 * "node scripts/..." path described in HELP): one positional argument with
 * the relative path to a package, plus an optional `--use` flag.
 *
 * @throws CliError on unknown flags, missing/extra positionals, an invalid
 *   `--use` value, or when run before bootstrap.
 */
export function parseBazelCliFlags(argv: string[]): BazelCliConfig {
  const { rawFlags, unknownFlags } = parseCliFlags(argv, {
    string: ['use'],
    default: {
      use: 'api-extractor',
    },
  });
  if (unknownFlags.length) {
    throw new CliError(`Unknown flags: ${unknownFlags.join(', ')}`, {
      showHelp: true,
    });
  }
  let REPO_ROOT;
  try {
    const name = 'utils';
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    const utils = require('@kbn/' + name);
    // NOTE(review): the concatenated module specifier appears deliberate —
    // presumably to keep static analysis/bundling from resolving this
    // bootstrap-only dependency eagerly. Confirm before "simplifying" it.
    REPO_ROOT = utils.REPO_ROOT as string;
  } catch (error) {
    if (error && error.code === 'MODULE_NOT_FOUND') {
      throw new CliError('type-summarizer bazel cli only works after bootstrap');
    }
    throw error;
  }
  const [relativePackagePath, ...extraPositional] = rawFlags._;
  if (typeof relativePackagePath !== 'string') {
    throw new CliError(`missing path to package as first positional argument`, { showHelp: true });
  }
  if (extraPositional.length) {
    throw new CliError(`extra positional arguments`, { showHelp: true });
  }
  const use = rawFlags.use;
  if (use !== 'api-extractor' && use !== 'type-summarizer') {
    throw new CliError(`invalid --use flag, expected "api-extractor" or "type-summarizer"`);
  }
  const packageDir = Path.resolve(relativePackagePath);
  // read the real package name from the package's own package.json
  const packageName: string = JSON.parse(
    Fs.readFileSync(Path.join(packageDir, 'package.json'), 'utf8')
  ).name;
  const repoRelativePackageDir = Path.relative(REPO_ROOT, packageDir);
  return {
    use,
    packageName,
    tsconfigPath: Path.join(REPO_ROOT, repoRelativePackageDir, 'tsconfig.json'),
    // types are read from node_modules, i.e. the package must be built/linked
    inputPath: Path.resolve(REPO_ROOT, 'node_modules', packageName, 'target_types/index.d.ts'),
    repoRelativePackageDir,
    outputDir: Path.resolve(REPO_ROOT, 'data/type-summarizer-output', use),
  };
}
export function parseBazelCliJson(json: string): BazelCliConfig {
let config;
try {
config = JSON.parse(json);
} catch (error) {
throw new CliError('unable to parse first positional argument as JSON');
}
if (typeof config !== 'object' || config === null) {
throw new CliError('config JSON must be an object');
}
const packageName = config.packageName;
if (!isString(packageName)) {
throw new CliError('packageName config must be a non-empty string');
}
const outputDir = config.outputDir;
if (!isString(outputDir)) {
throw new CliError('outputDir config must be a non-empty string');
}
if (Path.isAbsolute(outputDir)) {
throw new CliError(`outputDir [${outputDir}] must be a relative path`);
}
const tsconfigPath = config.tsconfigPath;
if (!isString(tsconfigPath)) {
throw new CliError('tsconfigPath config must be a non-empty string');
}
if (Path.isAbsolute(tsconfigPath)) {
throw new CliError(`tsconfigPath [${tsconfigPath}] must be a relative path`);
}
const inputPath = config.inputPath;
if (!isString(inputPath)) {
throw new CliError('inputPath config must be a non-empty string');
}
if (Path.isAbsolute(inputPath)) {
throw new CliError(`inputPath [${inputPath}] must be a relative path`);
}
const buildFilePath = config.buildFilePath;
if (!isString(buildFilePath)) {
throw new CliError('buildFilePath config must be a non-empty string');
}
if (Path.isAbsolute(buildFilePath)) {
throw new CliError(`buildFilePath [${buildFilePath}] must be a relative path`);
}
const repoRelativePackageDir = Path.dirname(buildFilePath);
return {
packageName,
outputDir: Path.resolve(outputDir),
tsconfigPath: Path.resolve(tsconfigPath),
inputPath: Path.resolve(inputPath),
repoRelativePackageDir,
use: TYPE_SUMMARIZER_PACKAGES.includes(packageName) ? 'type-summarizer' : 'api-extractor',
};
}
/**
 * Dispatch to the right parser: a first argument that looks like a JSON
 * object is the Bazel invocation, anything else is the flags-style CLI.
 */
export function parseBazelCliConfig(argv: string[]) {
  const [first] = argv;
  return typeof first === 'string' && first.startsWith('{')
    ? parseBazelCliJson(first)
    : parseBazelCliFlags(argv);
}

View file

@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/** Optional behavior attached to a CliError. */
export interface CliErrorOptions {
  exitCode?: number;
  showHelp?: boolean;
}

/**
 * Error thrown for failures that should be reported cleanly to the CLI user.
 * Carries the desired process exit code (default 1) and whether the help
 * text should be printed alongside the message (default false).
 */
export class CliError extends Error {
  public readonly exitCode: number;
  public readonly showHelp: boolean;

  constructor(message: string, options: CliErrorOptions = {}) {
    super(message);
    const { exitCode = 1, showHelp = false } = options;
    this.exitCode = exitCode;
    this.showHelp = showHelp;
  }
}

View file

@ -0,0 +1,45 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import getopts from 'getopts';
/** Caller-supplied additions to the base getopts configuration. */
interface ParseCliFlagsOptions {
  alias?: Record<string, string>;
  boolean?: string[];
  string?: string[];
  default?: Record<string, unknown>;
}

/**
 * Parse CLI flags with getopts, layering the caller's options on top of the
 * flags every command supports (help/verbose/debug/quiet/silent and the
 * v/d/h short aliases). Flags not declared anywhere are collected into
 * `unknownFlags` rather than being parsed.
 */
export function parseCliFlags(argv = process.argv.slice(2), options: ParseCliFlagsOptions = {}) {
  const unknownFlags: string[] = [];
  const rawFlags = getopts(argv, {
    alias: {
      v: 'verbose',
      d: 'debug',
      h: 'help',
      ...options.alias,
    },
    boolean: ['help', 'verbose', 'debug', 'quiet', 'silent', ...(options.boolean ?? [])],
    string: options.string ?? [],
    default: options.default,
    unknown(name) {
      unknownFlags.push(name);
      return false;
    },
  });
  return {
    rawFlags,
    unknownFlags,
  };
}

View file

@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import * as ts from 'typescript';
import { ValueNode, ExportFromDeclaration } from '../ts_nodes';
import { ResultValue } from './result_value';
import { ImportedSymbols } from './imported_symbols';
import { Reference, ReferenceKey } from './reference';
import { SourceMapper } from '../source_mapper';
export type CollectorResult = Reference | ImportedSymbols | ResultValue;

/**
 * Accumulates what the ExportCollector finds: declaration nodes to print,
 * imports to forward, and triple-slash reference directives. Deduplicates by
 * AST node (for values) and by module specifier (for imports) while
 * preserving first-seen order.
 */
export class CollectorResults {
  imports: ImportedSymbols[] = [];
  importsByPath = new Map<string, ImportedSymbols>();
  nodes: ResultValue[] = [];
  nodesByAst = new Map<ValueNode, ResultValue>();

  constructor(private readonly sourceMapper: SourceMapper) {}

  // Record a declaration node; if it was already recorded, only upgrade its
  // exported flag (never downgrade).
  addNode(exported: boolean, node: ValueNode) {
    const existing = this.nodesByAst.get(node);
    if (existing) {
      existing.exported = existing.exported || exported;
      return;
    }
    const result = new ResultValue(exported, node);
    this.nodesByAst.set(node, result);
    this.nodes.push(result);
  }

  // Mark a node as exported, adding it if necessary.
  ensureExported(node: ValueNode) {
    this.addNode(true, node);
  }

  // Record a symbol pulled in through an import/export-from statement;
  // symbols sharing a module specifier are merged onto one entry.
  // NOTE(review): when merging onto an existing entry the new `exported`
  // flag is discarded — the first statement's flag wins; confirm intended.
  addImport(
    exported: boolean,
    node: ts.ImportDeclaration | ExportFromDeclaration,
    symbol: ts.Symbol
  ) {
    const literal = node.moduleSpecifier;
    if (!ts.isStringLiteral(literal)) {
      throw new Error('import statement with non string literal module identifier');
    }
    const existing = this.importsByPath.get(literal.text);
    if (existing) {
      existing.symbols.push(symbol);
      return;
    }
    const result = new ImportedSymbols(exported, node, [symbol]);
    this.importsByPath.set(literal.text, result);
    this.imports.push(result);
  }

  // Deduplicated lib/types reference directives from every source file that
  // contributed a result node.
  private getReferencesFromNodes() {
    // collect the references from all the sourcefiles of all the resulting nodes
    const sourceFiles = new Set<ts.SourceFile>();
    for (const { node } of this.nodes) {
      sourceFiles.add(this.sourceMapper.getSourceFile(node));
    }
    const references: Record<ReferenceKey, Set<string>> = {
      lib: new Set(),
      types: new Set(),
    };
    for (const sourceFile of sourceFiles) {
      for (const ref of sourceFile.libReferenceDirectives) {
        references.lib.add(ref.fileName);
      }
      for (const ref of sourceFile.typeReferenceDirectives) {
        references.types.add(ref.fileName);
      }
    }
    return [
      ...Array.from(references.lib).map((name) => new Reference('lib', name)),
      ...Array.from(references.types).map((name) => new Reference('types', name)),
    ];
  }

  // Everything in output order: references first, then imports, then nodes.
  getAll(): CollectorResult[] {
    return [...this.getReferencesFromNodes(), ...this.imports, ...this.nodes];
  }
}

View file

@ -0,0 +1,209 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import * as ts from 'typescript';
import { Logger } from '../log';
import {
assertExportedValueNode,
isExportedValueNode,
DecSymbol,
assertDecSymbol,
toDecSymbol,
ExportFromDeclaration,
isExportFromDeclaration,
isAliasSymbol,
} from '../ts_nodes';
import { ExportInfo } from '../export_info';
import { CollectorResults } from './collector_results';
import { SourceMapper } from '../source_mapper';
import { isNodeModule } from '../is_node_module';
// An alias that resolves to an import from node_modules; kept as an import
// in the summary rather than being inlined.
interface ResolvedNmImport {
  type: 'import';
  node: ts.ImportDeclaration | ExportFromDeclaration;
  targetPath: string;
}

// An alias fully resolved to a local (non-import) symbol.
interface ResolvedSymbol {
  type: 'symbol';
  symbol: DecSymbol;
}

/**
 * Walks the exports of a module's source file and collects every declaration
 * (and transitive dependency) needed to reproduce its public types, plus the
 * node_modules imports that must be preserved as imports.
 */
export class ExportCollector {
  constructor(
    private readonly log: Logger,
    private readonly typeChecker: ts.TypeChecker,
    private readonly sourceFile: ts.SourceFile,
    private readonly dtsDir: string,
    private readonly sourceMapper: SourceMapper
  ) {}

  // Walk up from each declaration of `symbol`; return the enclosing
  // import/export-from statement if there is one, undefined otherwise.
  private getParentImport(
    symbol: DecSymbol
  ): ts.ImportDeclaration | ExportFromDeclaration | undefined {
    for (const node of symbol.declarations) {
      let cursor: ts.Node = node;
      while (true) {
        if (ts.isImportDeclaration(cursor) || isExportFromDeclaration(cursor)) {
          return cursor;
        }
        if (ts.isSourceFile(cursor)) {
          break;
        }
        cursor = cursor.parent;
      }
    }
  }

  // Recursively gather the symbols of every descendant node; `seen` guards
  // against revisiting shared subtrees.
  private getAllChildSymbols(
    node: ts.Node,
    results = new Set<DecSymbol>(),
    seen = new Set<ts.Node>()
  ) {
    node.forEachChild((child) => {
      const childSymbol = this.typeChecker.getSymbolAtLocation(child);
      if (childSymbol) {
        results.add(toDecSymbol(childSymbol));
      }
      if (!seen.has(child)) {
        seen.add(child);
        this.getAllChildSymbols(child, results, seen);
      }
    });
    return results;
  }

  // Follow one link in an alias chain (import alias -> target symbol).
  private resolveAliasSymbolStep(alias: ts.Symbol): DecSymbol {
    // get the symbol this symbol references
    const aliased = this.typeChecker.getImmediateAliasedSymbol(alias);
    if (!aliased) {
      throw new Error(`symbol [${alias.escapedName}] is an alias without aliased symbol`);
    }
    assertDecSymbol(aliased);
    return aliased;
  }

  // If `symbol` sits inside an import/export-from whose target file lives in
  // node_modules, describe that import; otherwise return undefined.
  private getImportFromNodeModules(symbol: DecSymbol): undefined | ResolvedNmImport {
    const parentImport = this.getParentImport(symbol);
    if (parentImport) {
      // this symbol is within an import statement, is it an import from a node_module?
      const aliased = this.resolveAliasSymbolStep(symbol);
      // symbol is in an import or export-from statement, make sure we want to traverse to that file
      const targetPaths = [
        ...new Set(aliased.declarations.map((d) => this.sourceMapper.getSourceFile(d).fileName)),
      ];
      if (targetPaths.length > 1) {
        throw new Error('importing a symbol from multiple locations is unsupported at this time');
      }
      const targetPath = targetPaths[0];
      if (isNodeModule(this.dtsDir, targetPath)) {
        return {
          type: 'import',
          node: parentImport,
          targetPath,
        };
      }
    }
  }

  // Follow an alias chain until it either leaves the repo (a node_modules
  // import we keep as-is) or lands on a concrete local symbol.
  private resolveAliasSymbol(alias: DecSymbol): ResolvedNmImport | ResolvedSymbol {
    let symbol = alias;
    while (isAliasSymbol(symbol)) {
      const nmImport = this.getImportFromNodeModules(symbol);
      if (nmImport) {
        return nmImport;
      }
      symbol = this.resolveAliasSymbolStep(symbol);
    }
    return {
      type: 'symbol',
      symbol,
    };
  }

  // symbols already handled; prevents infinite recursion on cyclic types
  private traversedSymbols = new Set<DecSymbol>();

  // Core recursion: record `symbol` (exported iff exportInfo is set) and
  // everything its declarations reference. A symbol seen before is revisited
  // only when it is now known to be exported, to upgrade its nodes.
  private collectResults(
    results: CollectorResults,
    exportInfo: ExportInfo | undefined,
    symbol: DecSymbol
  ): void {
    const seen = this.traversedSymbols.has(symbol);
    if (seen && !exportInfo) {
      return;
    }
    this.traversedSymbols.add(symbol);
    const source = this.resolveAliasSymbol(symbol);
    if (source.type === 'import') {
      results.addImport(!!exportInfo, source.node, symbol);
      return;
    }
    symbol = source.symbol;
    if (seen) {
      // children were already collected on the first visit; just flip the
      // exported flag on this symbol's own declarations
      for (const node of symbol.declarations) {
        assertExportedValueNode(node);
        results.ensureExported(node);
      }
      return;
    }
    // split declarations into ambient/global ones (ignored) and local ones
    const globalDecs: ts.Declaration[] = [];
    const localDecs: ts.Declaration[] = [];
    for (const node of symbol.declarations) {
      const sourceFile = this.sourceMapper.getSourceFile(node);
      (isNodeModule(this.dtsDir, sourceFile.fileName) ? globalDecs : localDecs).push(node);
    }
    if (globalDecs.length) {
      this.log.debug(
        `Ignoring ${globalDecs.length} global declarations for "${source.symbol.escapedName}"`
      );
    }
    for (const node of localDecs) {
      // iterate through the child nodes to find nodes we need to export to make this useful
      const childSymbols = this.getAllChildSymbols(node);
      childSymbols.delete(symbol);
      for (const childSymbol of childSymbols) {
        this.collectResults(results, undefined, childSymbol);
      }
      if (isExportedValueNode(node)) {
        results.addNode(!!exportInfo, node);
      }
    }
  }

  /**
   * Collect the results for every export of the source file passed to the
   * constructor. Returns empty results (with a warning) for files the type
   * checker considers to have no module symbol.
   */
  run(): CollectorResults {
    const results = new CollectorResults(this.sourceMapper);
    const moduleSymbol = this.typeChecker.getSymbolAtLocation(this.sourceFile);
    if (!moduleSymbol) {
      this.log.warn('Source file has no symbol in the type checker, is it empty?');
      return results;
    }
    for (const symbol of this.typeChecker.getExportsOfModule(moduleSymbol)) {
      assertDecSymbol(symbol);
      this.collectResults(results, new ExportInfo(`${symbol.escapedName}`), symbol);
    }
    return results;
  }
}

View file

@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import * as ts from 'typescript';
import { ExportFromDeclaration } from '../ts_nodes';
/**
 * Collector result representing a single import (or export-from) statement
 * and the symbols pulled in through it. One instance exists per module
 * specifier; symbols from repeated statements are merged into `symbols`.
 */
export class ImportedSymbols {
  type = 'import' as const;
  constructor(
    // true when the import is itself re-exported from the summarized module
    public readonly exported: boolean,
    public readonly importNode: ts.ImportDeclaration | ExportFromDeclaration,
    // TODO: I'm going to need to keep track of local names for these... unless that's embedded in the symbols
    public readonly symbols: ts.Symbol[]
  ) {}
}

View file

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export * from './exports_collector';
export * from './collector_results';

View file

@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// the two triple-slash directive kinds forwarded into the summary
export type ReferenceKey = 'types' | 'lib';

/** Collector result for a `/// <reference lib=...>` or `types=...` directive. */
export class Reference {
  type = 'reference' as const;
  constructor(public readonly key: ReferenceKey, public readonly name: string) {}
}

View file

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { ValueNode } from '../ts_nodes';
/**
 * Collector result wrapping a declaration node that belongs in the summary.
 * `exported` is intentionally mutable: a node first reached transitively may
 * later turn out to be directly exported (see CollectorResults.addNode).
 */
export class ResultValue {
  type = 'value' as const;
  constructor(public exported: boolean, public readonly node: ValueNode) {}
}

View file

@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/** Carries the public (exported) name of a symbol being collected. */
export class ExportInfo {
  constructor(public readonly name: string) {}
}

View file

@ -0,0 +1,19 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/**
 * Normalize an arbitrary thrown value into an Error instance. Real Error
 * objects pass through untouched; anything else is stringified into a new
 * Error's message.
 */
export function toError(thrown: unknown) {
  return thrown instanceof Error ? thrown : new Error(`${thrown} thrown`);
}
/**
 * Type guard: treat an error as a Node.js system error when it carries a
 * string `code` property (e.g. "ENOENT").
 */
export function isSystemError(error: Error): error is NodeJS.ErrnoException {
  const { code } = error as any;
  return typeof code === 'string';
}

View file

@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Fsp from 'fs/promises';
import { toError, isSystemError } from './error';
/**
 * Read a file, resolving to `undefined` when it does not exist (ENOENT).
 * Any other failure — permissions, non-system errors, etc. — is rethrown.
 */
export async function tryReadFile(
  path: string,
  encoding: 'utf-8' | 'utf8'
): Promise<string | undefined>;
export async function tryReadFile(path: string, encoding?: BufferEncoding) {
  try {
    return await Fsp.readFile(path, encoding);
  } catch (caught) {
    const error = toError(caught);
    if (!isSystemError(error) || error.code !== 'ENOENT') {
      throw error;
    }
    return undefined;
  }
}

View file

@ -0,0 +1,23 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { parseJson } from './json';
// happy path: valid JSON comes back as a plain object
it('parses JSON', () => {
  expect(parseJson('{"foo": "bar"}')).toMatchInlineSnapshot(`
    Object {
      "foo": "bar",
    }
  `);
});

// failure path: the underlying JSON.parse message is wrapped with a prefix.
// NOTE(review): the snapshot embeds V8's exact JSON.parse error text, which
// changes between Node.js versions — confirm when upgrading Node.
it('throws more helpful errors', () => {
  expect(() => parseJson('{"foo": bar}')).toThrowErrorMatchingInlineSnapshot(
    `"Failed to parse JSON: Unexpected token b in JSON at position 8"`
  );
});

View file

@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { toError } from './error';
export function parseJson(json: string, from?: string) {
try {
return JSON.parse(json);
} catch (_) {
const error = toError(_);
throw new Error(`Failed to parse JSON${from ? ` from ${from}` : ''}: ${error.message}`);
}
}

View file

@ -0,0 +1,17 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Path from 'path';
import isPathInside from 'is-path-inside';
/**
 * Determine whether `path` points into a `node_modules` directory.
 *
 * Paths inside `dtsDir` are first made relative to it, so a `node_modules`
 * segment in `dtsDir` itself does not count as a match.
 */
export function isNodeModule(dtsDir: string, path: string) {
  const checked = isPathInside(path, dtsDir) ? Path.relative(dtsDir, path) : path;
  return checked.split(Path.sep).includes('node_modules');
}

View file

@ -0,0 +1,99 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { format } from 'util';
import { dim, blueBright, yellowBright, redBright, gray } from 'chalk';
import getopts from 'getopts';
import { Logger } from './logger';
// numeric rank of each log level; higher ranks include all lower-ranked output
const LOG_LEVEL_RANKS = {
  silent: 0,
  quiet: 1,
  info: 2,
  debug: 3,
  verbose: 4,
};
export type LogLevel = keyof typeof LOG_LEVEL_RANKS;
// all level names, sorted from least (silent) to most (verbose) output
const LOG_LEVELS = (Object.keys(LOG_LEVEL_RANKS) as LogLevel[]).sort(
  (a, b) => LOG_LEVEL_RANKS[a] - LOG_LEVEL_RANKS[b]
);
// level names from most to least verbose, used for CLI flag precedence
const LOG_LEVELS_DESC = LOG_LEVELS.slice().reverse();
// map of level name => whether messages at that level should be written
type LogLevelMap = { [k in LogLevel]: boolean };
// minimal sink interface; satisfied by process.stdout and by test collectors
export interface LogWriter {
  write(chunk: string): void;
}
/**
 * Logger implementation for the CLI: writes formatted, color-coded messages
 * to a `LogWriter` (usually `process.stdout`), filtered by the configured
 * log level.
 */
export class CliLog implements Logger {
  /**
   * Expand a level name into a map of `level => enabled`, enabling every
   * level with a rank at or below the requested one.
   * @throws Error if `level` is not a known log level
   */
  static parseLogLevel(level: LogLevel) {
    if (!LOG_LEVELS.includes(level)) {
      throw new Error('invalid log level');
    }

    const rank = LOG_LEVEL_RANKS[level];
    return Object.fromEntries(
      LOG_LEVELS.map((l) => [l, LOG_LEVEL_RANKS[l] <= rank])
    ) as LogLevelMap;
  }

  /**
   * Pick the log level from parsed CLI flags, preferring the most verbose
   * level flag when several are passed (e.g. `--debug --quiet` => debug).
   *
   * (fixed typo: parameter was previously named `defaultLogLevl`; the rename
   * is backward compatible since arguments are positional)
   */
  static pickLogLevelFromFlags(
    flags: getopts.ParsedOptions,
    defaultLogLevel: LogLevel = 'info'
  ): LogLevel {
    for (const level of LOG_LEVELS_DESC) {
      if (Object.prototype.hasOwnProperty.call(flags, level) && flags[level] === true) {
        return level;
      }
    }

    return defaultLogLevel;
  }

  // pre-computed enabled/disabled state for every level
  private readonly map: LogLevelMap;

  constructor(public readonly level: LogLevel, private readonly writeTo: LogWriter) {
    this.map = CliLog.parseLogLevel(level);
  }

  info(msg: string, ...args: any[]) {
    if (this.map.info) {
      this.writeTo.write(`${blueBright('info')} ${format(msg, ...args)}\n`);
    }
  }

  // warn/error/success are gated on the "quiet" rank, so they are written at
  // every level except "silent"
  warn(msg: string, ...args: any[]) {
    if (this.map.quiet) {
      this.writeTo.write(`${yellowBright('warning')} ${format(msg, ...args)}\n`);
    }
  }

  error(msg: string, ...args: any[]) {
    if (this.map.quiet) {
      this.writeTo.write(`${redBright('error')} ${format(msg, ...args)}\n`);
    }
  }

  debug(msg: string, ...args: any[]) {
    if (this.map.debug) {
      this.writeTo.write(`${gray('debug')} ${format(msg, ...args)}\n`);
    }
  }

  verbose(msg: string, ...args: any[]) {
    if (this.map.verbose) {
      this.writeTo.write(`${dim('verbose')}: ${format(msg, ...args)}\n`);
    }
  }

  success(msg: string, ...args: any[]): void {
    if (this.map.quiet) {
      this.writeTo.write(`${format(msg, ...args)}\n`);
    }
  }
}

View file

@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export * from './logger';
export * from './cli_log';
export * from './test_log';

View file

@ -0,0 +1,49 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/**
 * Logger interface used by @kbn/type-summarizer
 */
export interface Logger {
  /**
   * Write a message to the log with the level "info"
   * @param msg any message
   * @param args any serializable values you would like to be appended to the log message
   */
  info(msg: string, ...args: any[]): void;
  /**
   * Write a message to the log with the level "warn"
   * @param msg any message
   * @param args any serializable values you would like to be appended to the log message
   */
  warn(msg: string, ...args: any[]): void;
  /**
   * Write a message to the log with the level "error"
   * @param msg any message
   * @param args any serializable values you would like to be appended to the log message
   */
  error(msg: string, ...args: any[]): void;
  /**
   * Write a message to the log with the level "debug"
   * @param msg any message
   * @param args any serializable values you would like to be appended to the log message
   */
  debug(msg: string, ...args: any[]): void;
  /**
   * Write a message to the log with the level "verbose"
   * @param msg any message
   * @param args any serializable values you would like to be appended to the log message
   */
  verbose(msg: string, ...args: any[]): void;
  /**
   * Write a message to the log, only excluded in silent mode
   * @param msg any message
   * @param args any serializable values you would like to be appended to the log message
   */
  success(msg: string, ...args: any[]): void;
}

View file

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { CliLog, LogLevel } from './cli_log';
/**
 * CliLog subclass for use in tests: instead of printing, every chunk written
 * to the log is captured in the `messages` array. Defaults to the most
 * verbose level so that all output is recorded.
 */
export class TestLog extends CliLog {
  // every chunk written to the log, in write order
  messages: string[] = [];

  constructor(level: LogLevel = 'verbose') {
    super(level, {
      // the arrow fn defers the `this` access until after construction completes
      write: (chunk) => {
        this.messages.push(chunk);
      },
    });
  }
}

View file

@ -0,0 +1,362 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Path from 'path';
import * as ts from 'typescript';
import { SourceNode, CodeWithSourceMap } from 'source-map';
import { findKind } from './ts_nodes';
import { SourceMapper } from './source_mapper';
import { CollectorResult } from './export_collector';
// chunks of output: plain strings or source-mapped SourceNode instances
type SourceNodes = Array<string | SourceNode>;

// matches the leading whitespace and opening token of a comment line
const COMMENT_TRIM = /^(\s+)(\/\*|\*|\/\/)/;

/**
 * Converts the collected export results into the final summary .d.ts source,
 * producing both the generated code and a source map whose mappings point
 * back at the original source files (via SourceMapper).
 */
export class Printer {
  // plain TypeScript printer used for nodes that don't need source mapping
  private readonly tsPrint = ts.createPrinter({
    newLine: ts.NewLineKind.LineFeed,
    noEmitHelpers: true,
    omitTrailingSemicolon: false,
    removeComments: true,
  });

  constructor(
    private readonly sourceMapper: SourceMapper,
    private readonly results: CollectorResult[],
    private readonly outputPath: string,
    private readonly mapOutputPath: string,
    private readonly sourceRoot: string,
    private readonly strict: boolean
  ) {}

  /**
   * Render all collected results into a single output file, returning the
   * generated code plus its source map. A `//# sourceMappingURL=` comment
   * pointing at `mapOutputPath` is appended to the code.
   */
  async print(): Promise<CodeWithSourceMap> {
    const file = new SourceNode(
      null,
      null,
      null,
      this.results.flatMap((r) => {
        if (r.type === 'reference') {
          return `/// <reference ${r.key}="${r.name}" />\n`;
        }
        if (r.type === 'import') {
          // TODO: handle default imports, imports with alternate names, etc
          return `import { ${r.symbols
            .map((s) => s.escapedName)
            .join(', ')} } from ${r.importNode.moduleSpecifier.getText()};\n`;
        }
        return this.toSourceNodes(r.node, r.exported);
      })
    );
    const outputDir = Path.dirname(this.outputPath);
    const mapOutputDir = Path.dirname(this.mapOutputPath);
    const output = file.toStringWithSourceMap({
      file: Path.relative(mapOutputDir, this.outputPath),
      sourceRoot: this.sourceRoot,
    });
    // make sure the sourceMappingURL comment starts on its own line
    const nl = output.code.endsWith('\n') ? '' : '\n';
    const sourceMapPathRel = Path.relative(outputDir, this.mapOutputPath);
    output.code += `${nl}//# sourceMappingURL=${sourceMapPathRel}`;
    return output;
  }

  // determine the keyword which introduces this declaration in the output;
  // returns undefined for declaration kinds that don't need one
  private getDeclarationKeyword(node: ts.Declaration) {
    if (node.kind === ts.SyntaxKind.FunctionDeclaration) {
      return 'function';
    }
    if (node.kind === ts.SyntaxKind.TypeAliasDeclaration) {
      return 'type';
    }
    if (node.kind === ts.SyntaxKind.ClassDeclaration) {
      return 'class';
    }
    if (node.kind === ts.SyntaxKind.InterfaceDeclaration) {
      return 'interface';
    }
    if (ts.isVariableDeclaration(node)) {
      return this.getVariableDeclarationType(node);
    }
  }

  // render the modifier keywords ("export", "abstract", "const", ...) plus the
  // declaration keyword, followed by a trailing space
  private printModifiers(exported: boolean, node: ts.Declaration) {
    const flags = ts.getCombinedModifierFlags(node);
    const modifiers: string[] = [];
    if (exported) {
      modifiers.push('export');
    }
    if (flags & ts.ModifierFlags.Default) {
      modifiers.push('default');
    }
    if (flags & ts.ModifierFlags.Abstract) {
      modifiers.push('abstract');
    }
    if (flags & ts.ModifierFlags.Private) {
      modifiers.push('private');
    }
    if (flags & ts.ModifierFlags.Public) {
      modifiers.push('public');
    }
    if (flags & ts.ModifierFlags.Static) {
      modifiers.push('static');
    }
    if (flags & ts.ModifierFlags.Readonly) {
      modifiers.push('readonly');
    }
    if (flags & ts.ModifierFlags.Const) {
      modifiers.push('const');
    }
    if (flags & ts.ModifierFlags.Async) {
      modifiers.push('async');
    }
    const keyword = this.getDeclarationKeyword(node);
    if (keyword) {
      modifiers.push(keyword);
    }
    return `${modifiers.join(' ')} `;
  }

  // print a node with the plain TypeScript printer (no source mapping)
  private printNode(node: ts.Node) {
    return this.tsPrint.printNode(
      ts.EmitHint.Unspecified,
      node,
      this.sourceMapper.getSourceFile(node)
    );
  }

  // append a trailing newline to a string or chunk list if it lacks one
  private ensureNewline(string: string): string;
  private ensureNewline(string: SourceNodes): SourceNodes;
  private ensureNewline(string: string | SourceNodes): string | SourceNodes {
    if (typeof string === 'string') {
      return string.endsWith('\n') ? string : `${string}\n`;
    }
    const end = string.at(-1);
    if (end === undefined) {
      return [];
    }
    const valid = (typeof end === 'string' ? end : end.toString()).endsWith('\n');
    return valid ? string : [...string, '\n'];
  }

  // create a SourceNode for `node` that maps back to its original location
  private getMappedSourceNode(node: ts.Node, code?: string) {
    return this.sourceMapper.getSourceNode(node, code ?? node.getText());
  }

  // resolve the VariableDeclarationList which contains `node`, failing loudly
  // if the AST isn't shaped the way we expect
  private getVariableDeclarationList(node: ts.VariableDeclaration) {
    const list = node.parent;
    if (!ts.isVariableDeclarationList(list)) {
      const kind = findKind(list);
      throw new Error(
        `expected parent of variable declaration to be a VariableDeclarationList, got [${kind}]`
      );
    }
    return list;
  }

  // determine whether a variable declaration used const/let/var
  private getVariableDeclarationType(node: ts.VariableDeclaration) {
    const flags = ts.getCombinedNodeFlags(this.getVariableDeclarationList(node));
    if (flags & ts.NodeFlags.Const) {
      return 'const';
    }
    if (flags & ts.NodeFlags.Let) {
      return 'let';
    }
    return 'var';
  }

  // get the node's full text, expanding to the enclosing VariableStatement for
  // single-declaration variable statements so their comments are included
  private getSourceWithLeadingComments(node: ts.Node) {
    // variable declarations regularly have leading comments but they're two-parents up, so we have to handle them separately
    if (!ts.isVariableDeclaration(node)) {
      return node.getFullText();
    }
    const list = this.getVariableDeclarationList(node);
    if (list.declarations.length > 1) {
      return node.getFullText();
    }
    const statement = list.parent;
    if (!ts.isVariableStatement(statement)) {
      throw new Error('expected parent of VariableDeclarationList to be a VariableStatement');
    }
    return statement.getFullText();
  }

  // extract the leading comments of a node, re-indented to `indentWidth`
  // spaces, dropping triple-slash reference directives
  private getLeadingComments(node: ts.Node, indentWidth = 0): string[] {
    const fullText = this.getSourceWithLeadingComments(node);
    const ranges = ts.getLeadingCommentRanges(fullText, 0);
    if (!ranges) {
      return [];
    }
    const indent = ' '.repeat(indentWidth);
    return ranges.flatMap((range) => {
      const comment = fullText
        .slice(range.pos, range.end)
        .split('\n')
        .map((line) => {
          const match = line.match(COMMENT_TRIM);
          if (!match) {
            return line;
          }
          const [, spaces, type] = match;
          // keep one space before "*" lines so block comments stay aligned
          return line.slice(type === '*' ? spaces.length - 1 : spaces.length);
        })
        .map((line) => `${indent}${line}`)
        .join('\n');
      if (comment.startsWith('/// <reference')) {
        return [];
      }
      return comment + (range.hasTrailingNewLine ? '\n' : '');
    });
  }

  // render a declaration's type-parameter list (e.g. "<T, U extends X>"), or
  // an empty string when there are no type parameters
  private printTypeParameters(
    node:
      | ts.ClassDeclaration
      | ts.InterfaceDeclaration
      | ts.FunctionDeclaration
      | ts.TypeAliasDeclaration
  ) {
    const typeParams = node.typeParameters;
    if (!typeParams || !typeParams.length) {
      return '';
    }
    return `<${typeParams.map((p) => this.printNode(p)).join(', ')}>`;
  }

  // convert a supported AST node into output chunks, mapping identifier names
  // back at their original source locations
  private toSourceNodes(node: ts.Node, exported = false): SourceNodes {
    switch (node.kind) {
      case ts.SyntaxKind.LiteralType:
      case ts.SyntaxKind.StringLiteral:
      case ts.SyntaxKind.BigIntLiteral:
      case ts.SyntaxKind.NumericLiteral:
      case ts.SyntaxKind.StringKeyword:
        return [this.printNode(node)];
    }
    if (ts.isFunctionDeclaration(node)) {
      // we are just trying to replace the name with a sourceMapped node, so if there
      // is no name just return the source
      if (!node.name) {
        return [node.getFullText()];
      }
      return [
        this.getLeadingComments(node),
        this.printModifiers(exported, node),
        this.getMappedSourceNode(node.name),
        this.printTypeParameters(node),
        `(${node.parameters.map((p) => p.getFullText()).join(', ')})`,
        node.type ? [': ', this.printNode(node.type), ';'] : ';',
      ].flat();
    }
    if (ts.isInterfaceDeclaration(node)) {
      // splice a source-mapped name node into the printed interface text
      const text = node.getText();
      const name = node.name.getText();
      const nameI = text.indexOf(name);
      if (nameI === -1) {
        throw new Error(`printed version of interface does not include name [${name}]: ${text}`);
      }
      return [
        ...this.getLeadingComments(node),
        text.slice(0, nameI),
        this.getMappedSourceNode(node.name, name),
        text.slice(nameI + name.length),
        '\n',
      ];
    }
    if (ts.isVariableDeclaration(node)) {
      return [
        ...this.getLeadingComments(node),
        this.printModifiers(exported, node),
        this.getMappedSourceNode(node.name),
        ...(node.type ? [': ', this.printNode(node.type)] : []),
        ';\n',
      ];
    }
    if (ts.isUnionTypeNode(node)) {
      return node.types.flatMap((type, i) =>
        i > 0 ? [' | ', ...this.toSourceNodes(type)] : this.toSourceNodes(type)
      );
    }
    if (ts.isTypeAliasDeclaration(node)) {
      return [
        ...this.getLeadingComments(node),
        this.printModifiers(exported, node),
        this.getMappedSourceNode(node.name),
        this.printTypeParameters(node),
        ' = ',
        this.ensureNewline(this.toSourceNodes(node.type)),
      ].flat();
    }
    if (ts.isClassDeclaration(node)) {
      return [
        ...this.getLeadingComments(node),
        this.printModifiers(exported, node),
        node.name ? this.getMappedSourceNode(node.name) : [],
        this.printTypeParameters(node),
        ' {\n',
        node.members.flatMap((m) => {
          const memberText = m.getText();
          if (ts.isConstructorDeclaration(m)) {
            return `  ${memberText}\n`;
          }
          if (!m.name) {
            return `  ${memberText}\n`;
          }
          // splice a source-mapped member-name node into the member text
          const nameText = m.name.getText();
          const pos = memberText.indexOf(nameText);
          if (pos === -1) {
            return `  ${memberText}\n`;
          }
          const left = memberText.slice(0, pos);
          const right = memberText.slice(pos + nameText.length);
          const nameNode = this.getMappedSourceNode(m.name, nameText);
          return [...this.getLeadingComments(m, 2), `  `, left, nameNode, right, `\n`];
        }),
        '}\n',
      ].flat();
    }
    // in non-strict mode fall back to the plain printer for unknown kinds;
    // strict mode (used in tests) surfaces unsupported kinds as errors
    if (!this.strict) {
      return [this.ensureNewline(this.printNode(node))];
    } else {
      throw new Error(`unable to print export type of kind [${findKind(node)}]`);
    }
  }
}

View file

@ -0,0 +1,49 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import getopts from 'getopts';
import { CliLog, LogLevel } from './log';
import { toError } from './helpers/error';
import { CliError } from './cli_error';
/**
 * Context object passed to the `main` function of a CLI run
 */
export interface RunContext {
  /** argv values after the node binary and script path */
  argv: string[];
  /** logger configured from the parsed log-level flags */
  log: CliLog;
}

/**
 * Options controlling how a CLI `main` function is executed
 */
export interface RunOptions {
  /** help text written to stdout when a CliError with `showHelp` is thrown */
  helpText: string;
  /** log level used when no level flag is present on the command line */
  defaultLogLevel?: LogLevel;
}

/**
 * Execute a CLI `main` function with standardized argv/log setup and error
 * handling: a thrown `CliError` sets the exit code (optionally printing the
 * help text), any other error is reported as unhandled with exit code 1.
 */
export async function run(main: (ctx: RunContext) => Promise<void>, options: RunOptions) {
  const argv = process.argv.slice(2);
  const flags = getopts(argv);
  const logLevel = CliLog.pickLogLevelFromFlags(flags, options.defaultLogLevel);
  const log = new CliLog(logLevel, process.stdout);

  try {
    await main({ argv, log });
  } catch (caught) {
    const error = toError(caught);

    if (!(error instanceof CliError)) {
      log.error('UNHANDLED ERROR', error.stack);
      process.exitCode = 1;
      return;
    }

    process.exitCode = error.exitCode;
    log.error(error.message);
    if (error.showHelp) {
      process.stdout.write(options.helpText);
    }
  }
}

View file

@ -0,0 +1,141 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Path from 'path';
import * as ts from 'typescript';
import { SourceNode, SourceMapConsumer, BasicSourceMapConsumer } from 'source-map';
import normalizePath from 'normalize-path';
import { Logger } from './log';
import { tryReadFile } from './helpers/fs';
import { parseJson } from './helpers/json';
import { isNodeModule } from './is_node_module';
/**
 * Loads the source maps produced alongside the .d.ts files and uses them to
 * resolve generated positions back to their original source locations, and
 * to rewrite `source` paths to be relative to the package directory.
 */
export class SourceMapper {
  /**
   * Build a SourceMapper for a set of .d.ts source files: for every non
   * node_modules file, find its `sourceMappingURL` comment, load the
   * referenced source map from disk and create a SourceMapConsumer for it.
   * Files without a sourceMappingURL are tracked with an `undefined` consumer.
   * @throws when a referenced source map file can't be read
   */
  static async forSourceFiles(
    log: Logger,
    dtsDir: string,
    repoRelativePackageDir: string,
    sourceFiles: readonly ts.SourceFile[]
  ) {
    const consumers = new Map<ts.SourceFile, BasicSourceMapConsumer | undefined>();
    await Promise.all(
      sourceFiles.map(async (sourceFile) => {
        if (isNodeModule(dtsDir, sourceFile.fileName)) {
          return;
        }
        const text = sourceFile.getText();
        const match = text.match(/^\/\/#\s*sourceMappingURL=(.*)/im);
        if (!match) {
          consumers.set(sourceFile, undefined);
          return;
        }
        const relSourceFile = Path.relative(process.cwd(), sourceFile.fileName);
        // the sourceMappingURL is resolved relative to the .d.ts file's directory
        const sourceMapPath = Path.resolve(Path.dirname(sourceFile.fileName), match[1]);
        const relSourceMapPath = Path.relative(process.cwd(), sourceMapPath);
        const sourceJson = await tryReadFile(sourceMapPath, 'utf8');
        if (!sourceJson) {
          throw new Error(
            `unable to find source map for [${relSourceFile}] expected at [${match[1]}]`
          );
        }
        const json = parseJson(sourceJson, `source map at [${relSourceMapPath}]`);
        consumers.set(sourceFile, await new SourceMapConsumer(json));
        log.debug('loaded sourcemap for', relSourceFile);
      })
    );
    return new SourceMapper(consumers, repoRelativePackageDir);
  }

  // package dir resolved against "/", used as the base when fixing source paths
  private readonly sourceFixDir: string;

  constructor(
    private readonly consumers: Map<ts.SourceFile, BasicSourceMapConsumer | undefined>,
    repoRelativePackageDir: string
  ) {
    this.sourceFixDir = Path.join('/', repoRelativePackageDir);
  }

  /**
   * We ensure that `sourceRoot` is not defined in the tsconfig files, and we assume that the `source` value
   * for each file in the source map will be a relative path out of the bazel-out dir and to the `repoRelativePackageDir`
   * or some path outside of the package in rare situations. Our goal is to convert each of these source paths
   * to new path that is relative to the `repoRelativePackageDir` path. To do this we resolve the `repoRelativePackageDir`
   * as if it was at the root of the filesystem, then do the same for the `source`, so both paths should be
   * absolute, but only include the path segments from the root of the repo. We then get the relative path from
   * the absolute version of the `repoRelativePackageDir` to the absolute version of the `source`, which should give
   * us the path to the source, relative to the `repoRelativePackageDir`.
   */
  fixSourcePath(source: string) {
    return normalizePath(Path.relative(this.sourceFixDir, Path.join('/', source)));
  }

  /**
   * Create a SourceNode for `code`, attached to the original position of
   * `generatedNode` when one can be resolved, otherwise unmapped.
   */
  getSourceNode(generatedNode: ts.Node, code: string) {
    const pos = this.findOriginalPosition(generatedNode);

    if (pos) {
      return new SourceNode(pos.line, pos.column, pos.source, code, pos.name ?? undefined);
    }

    return new SourceNode(null, null, null, code);
  }

  // memoizes getSourceFile() lookups per node
  sourceFileCache = new WeakMap<ts.Node, ts.SourceFile>();
  // abstracted so we can cache this
  getSourceFile(node: ts.Node): ts.SourceFile {
    if (ts.isSourceFile(node)) {
      return node;
    }

    const cached = this.sourceFileCache.get(node);
    if (cached) {
      return cached;
    }

    const sourceFile = this.getSourceFile(node.parent);
    this.sourceFileCache.set(node, sourceFile);
    return sourceFile;
  }

  /**
   * Map a node's position in the generated .d.ts back to its original source
   * position; returns undefined when the node's file has no source map.
   * @throws when the node's source file was never registered with this mapper
   */
  findOriginalPosition(node: ts.Node) {
    const dtsSource = this.getSourceFile(node);

    if (!this.consumers.has(dtsSource)) {
      throw new Error(`sourceFile for [${dtsSource.fileName}] didn't have sourcemaps loaded`);
    }

    const consumer = this.consumers.get(dtsSource);
    if (!consumer) {
      return;
    }

    const posInDts = dtsSource.getLineAndCharacterOfPosition(node.getStart());
    const pos = consumer.originalPositionFor({
      /* ts line column numbers are 0 based, source map column numbers are also 0 based */
      column: posInDts.character,
      /* ts line numbers are 0 based, source map line numbers are 1 based */
      line: posInDts.line + 1,
    });

    return {
      ...pos,
      source: pos.source ? this.fixSourcePath(pos.source) : null,
    };
  }

  /**
   * Destroy all SourceMapConsumers; the mapper must not be used afterwards.
   */
  close() {
    for (const consumer of this.consumers.values()) {
      consumer?.destroy();
    }
  }
}

View file

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import * as ts from 'typescript';
/** Union of the declaration node types which the summarizer knows how to export */
export type ValueNode =
  | ts.ClassDeclaration
  | ts.FunctionDeclaration
  | ts.TypeAliasDeclaration
  | ts.VariableDeclaration
  | ts.InterfaceDeclaration;

/** Type guard for {@link ValueNode} */
export function isExportedValueNode(node: ts.Node): node is ValueNode {
  switch (node.kind) {
    case ts.SyntaxKind.ClassDeclaration:
    case ts.SyntaxKind.FunctionDeclaration:
    case ts.SyntaxKind.TypeAliasDeclaration:
    case ts.SyntaxKind.VariableDeclaration:
    case ts.SyntaxKind.InterfaceDeclaration:
      return true;
    default:
      return false;
  }
}

/** Assert that `node` is a {@link ValueNode}, throwing a descriptive error otherwise */
export function assertExportedValueNode(node: ts.Node): asserts node is ValueNode {
  if (!isExportedValueNode(node)) {
    const kind = findKind(node);
    throw new Error(`not a valid ExportedValueNode [kind=${kind}]`);
  }
}

/** Narrow `node` to a {@link ValueNode}, throwing when it is not one */
export function toExportedNodeValue(node: ts.Node): ValueNode {
  assertExportedValueNode(node);
  return node;
}

/** Resolve the human-readable SyntaxKind name for a node */
export function findKind(node: ts.Node) {
  const entry = Object.entries(ts.SyntaxKind).find(([, value]) => node.kind === value);
  if (!entry) {
    throw new Error('node.kind is not in the SyntaxKind map');
  }
  return entry[0];
}

/** A ts.Symbol which is guaranteed to have at least one declaration */
export type DecSymbol = ts.Symbol & {
  declarations: NonNullable<ts.Symbol['declarations']>;
};

/** Type guard for {@link DecSymbol} */
export function isDecSymbol(symbol: ts.Symbol): symbol is DecSymbol {
  return !!symbol.declarations;
}

/** Assert that `symbol` has declarations */
export function assertDecSymbol(symbol: ts.Symbol): asserts symbol is DecSymbol {
  if (!isDecSymbol(symbol)) {
    throw new Error('symbol has no declarations');
  }
}

/** Narrow `symbol` to a {@link DecSymbol}, throwing when it has no declarations */
export function toDecSymbol(symbol: ts.Symbol): DecSymbol {
  assertDecSymbol(symbol);
  return symbol;
}

/** An export declaration which re-exports from another module (has a moduleSpecifier) */
export type ExportFromDeclaration = ts.ExportDeclaration & {
  moduleSpecifier: NonNullable<ts.ExportDeclaration['moduleSpecifier']>;
};

/** Type guard for {@link ExportFromDeclaration} */
export function isExportFromDeclaration(node: ts.Node): node is ExportFromDeclaration {
  return ts.isExportDeclaration(node) && !!node.moduleSpecifier;
}

/** Truthy when `symbol` is an alias symbol (returns the raw flag bits) */
export function isAliasSymbol(symbol: ts.Symbol) {
  return symbol.flags & ts.SymbolFlags.Alias;
}

View file

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import * as ts from 'typescript';
/**
 * Create a ts.Program from a parsed tsconfig and a list of root input paths,
 * always type-checking library files (any `skipLibCheck` from the tsconfig is
 * overridden).
 */
export function createTsProject(tsConfig: ts.ParsedCommandLine, inputPaths: string[]) {
  const options: ts.CompilerOptions = {
    ...tsConfig.options,
    // force full checking of .d.ts inputs regardless of the tsconfig setting
    skipLibCheck: false,
  };

  return ts.createProgram({
    rootNames: inputPaths,
    options,
    projectReferences: tsConfig.projectReferences,
  });
}

View file

@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import * as ts from 'typescript';
import Path from 'path';
import { CliError } from './cli_error';
/**
 * Read and parse the raw JSON content of a tsconfig file.
 * @throws CliError when the file can't be read or parsed
 */
export function readTsConfigFile(path: string) {
  const { config, error } = ts.readConfigFile(path, ts.sys.readFile);

  if (error) {
    throw new CliError(`Unable to load tsconfig file: ${error.messageText}`);
  }

  return config;
}

/**
 * Fully parse a tsconfig file, resolving compiler options and project
 * references relative to the file's directory.
 */
export function loadTsConfigFile(path: string) {
  const config = readTsConfigFile(path) ?? {};
  return ts.parseJsonConfigFileContent(config, ts.sys, Path.dirname(path));
}

View file

@ -0,0 +1,86 @@
/* eslint-disable @kbn/eslint/require-license-header */
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import Fsp from 'fs/promises';
import Path from 'path';
import { Extractor, ExtractorConfig } from '@microsoft/api-extractor';
import { readTsConfigFile } from './lib/tsconfig_file';
import { CliError } from './lib/cli_error';
/**
 * Run Microsoft's api-extractor over the .d.ts entry point at `entryPath`,
 * writing the rolled-up .d.ts bundle to `dtsBundleOutDir`.
 *
 * @param tsconfigPath path to the tsconfig file used to override the compiler config
 * @param entryPath path to the entry .d.ts file whose exports are bundled
 * @param dtsBundleOutDir location where the rolled-up .d.ts file is written
 * @throws CliError when api-extractor reports a failure
 */
export async function runApiExtractor(
  tsconfigPath: string,
  entryPath: string,
  dtsBundleOutDir: string
) {
  const pkgJson = Path.resolve(Path.dirname(entryPath), 'package.json');

  // api-extractor requires a package.json next to the entry point; write a
  // dummy one with flag "wx" (fail if it exists) and ignore the EEXIST error
  try {
    await Fsp.writeFile(
      pkgJson,
      JSON.stringify({
        name: 'GENERATED-BY-BAZEL',
        description: 'This is a dummy package.json as API Extractor always requires one.',
        types: './index.d.ts',
        private: true,
        license: 'SSPL-1.0 OR Elastic License 2.0',
        version: '1.0.0',
      }),
      {
        flag: 'wx',
      }
    );
  } catch (error) {
    if (!error.code || error.code !== 'EEXIST') {
      throw error;
    }
  }

  // API extractor doesn't always support the version of TypeScript used in the repo
  // example: at the moment it is not compatible with 3.2
  // to use the internal TypeScript we shall not create a program but rather pass a parsed tsConfig.
  const extractorOptions = {
    localBuild: false,
  };

  const extractorConfig = ExtractorConfig.prepare({
    configObject: {
      compiler: {
        overrideTsconfig: readTsConfigFile(tsconfigPath),
      },
      projectFolder: Path.dirname(tsconfigPath),
      mainEntryPointFilePath: entryPath,
      apiReport: {
        enabled: false,
        // TODO(alan-agius4): remove this folder name when the below issue is solved upstream
        // See: https://github.com/microsoft/web-build-tools/issues/1470
        reportFileName: 'invalid',
      },
      docModel: {
        enabled: false,
      },
      dtsRollup: {
        enabled: !!dtsBundleOutDir,
        untrimmedFilePath: dtsBundleOutDir,
      },
      tsdocMetadata: {
        enabled: false,
      },
    },
    packageJson: undefined,
    packageJsonFullPath: pkgJson,
    configObjectFullPath: undefined,
  });

  const { succeeded } = Extractor.invoke(extractorConfig, extractorOptions);

  if (!succeeded) {
    throw new CliError('api-extractor failed');
  }
}

View file

@ -0,0 +1,123 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Fsp from 'fs/promises';
import Path from 'path';
import normalizePath from 'normalize-path';
import { SourceMapper } from './lib/source_mapper';
import { createTsProject } from './lib/ts_project';
import { loadTsConfigFile } from './lib/tsconfig_file';
import { ExportCollector } from './lib/export_collector';
import { isNodeModule } from './lib/is_node_module';
import { Printer } from './lib/printer';
import { Logger } from './lib/log';
/**
 * Options used to customize the summarizePackage function
 *
 * NOTE(review): the interface name misspells "Package" but it is part of the
 * package's public API, so it can't be renamed without breaking consumers.
 */
export interface SummarizePacakgeOptions {
  /**
   * Absolute path to the directory containing the .d.ts files produced by `tsc`. Maps to the
   * `declarationDir` compiler option.
   */
  dtsDir: string;
  /**
   * Absolute path to the tsconfig.json file for the project we are summarizing
   */
  tsconfigPath: string;
  /**
   * Array of absolute paths to the .d.ts files which will be summarized. Each file in this
   * array will cause an output .d.ts summary file to be created containing all the AST nodes
   * which are exported or referenced by those exports.
   */
  inputPaths: string[];
  /**
   * Absolute path to the output directory where the summary .d.ts files should be written
   */
  outputDir: string;
  /**
   * Repo-relative path to the package source, for example `packages/kbn-type-summarizer` for
   * this package. This is used to provide the correct `sourceRoot` path in the resulting source
   * map files.
   */
  repoRelativePackageDir: string;
  /**
   * Should the printer throw an error if it doesn't know how to print an AST node? Primarily
   * used for testing
   */
  strictPrinting?: boolean;
}
/**
 * Produce summary .d.ts files for a package
 *
 * @param log logger used to report progress
 * @param options see {@link SummarizePacakgeOptions}
 * @throws when the tsconfig defines `sourceRoot`, or an input file is not part of the program
 */
export async function summarizePackage(log: Logger, options: SummarizePacakgeOptions) {
  const tsConfig = loadTsConfigFile(options.tsconfigPath);
  log.verbose('Created tsconfig', tsConfig);

  // a tsconfig-level sourceRoot would conflict with the relative sourceRoot we set below
  if (tsConfig.options.sourceRoot) {
    throw new Error(`${options.tsconfigPath} must not define "compilerOptions.sourceRoot"`);
  }

  const program = createTsProject(tsConfig, options.inputPaths);
  log.verbose('Loaded typescript program');

  const typeChecker = program.getTypeChecker();
  log.verbose('Typechecker loaded');

  const sourceFiles = program
    .getSourceFiles()
    .filter((f) => !isNodeModule(options.dtsDir, f.fileName))
    .sort((a, b) => a.fileName.localeCompare(b.fileName));

  const sourceMapper = await SourceMapper.forSourceFiles(
    log,
    options.dtsDir,
    options.repoRelativePackageDir,
    sourceFiles
  );

  // value that will end up as the `sourceRoot` in the final sourceMaps
  const sourceRoot = `../../../${normalizePath(options.repoRelativePackageDir)}`;

  try {
    for (const input of options.inputPaths) {
      const outputPath = Path.resolve(options.outputDir, Path.basename(input));
      const mapOutputPath = `${outputPath}.map`;
      const sourceFile = program.getSourceFile(input);
      if (!sourceFile) {
        throw new Error(`input file wasn't included in the program`);
      }

      const results = new ExportCollector(
        log,
        typeChecker,
        sourceFile,
        options.dtsDir,
        sourceMapper
      ).run();

      const printer = new Printer(
        sourceMapper,
        results.getAll(),
        outputPath,
        mapOutputPath,
        sourceRoot,
        !!options.strictPrinting
      );

      const summary = await printer.print();

      await Fsp.mkdir(options.outputDir, { recursive: true });
      await Fsp.writeFile(outputPath, summary.code);
      await Fsp.writeFile(mapOutputPath, JSON.stringify(summary.map));
    }
  } finally {
    // BUG FIX: close() was previously called inside the loop, which destroyed
    // the source map consumers after the first input and broke every
    // subsequent iteration; release them once, after all inputs are processed
    sourceMapper.close();
  }
}

View file

@ -0,0 +1,176 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/* eslint-disable no-console */
import Path from 'path';
import Fsp from 'fs/promises';
import * as ts from 'typescript';
import stripAnsi from 'strip-ansi';
import { loadTsConfigFile } from '../src/lib/tsconfig_file';
import { createTsProject } from '../src/lib/ts_project';
import { TestLog } from '../src/lib/log';
import { summarizePackage } from '../src/summarize_package';
// scratch directory, next to this test file, used for mock sources and output
const TMP_DIR = Path.resolve(__dirname, '__tmp__');

// NOTE(review): shaped like a ts.FormatDiagnosticsHost — presumably passed to
// TypeScript's diagnostic formatters; confirm against printDiagnostics below
const DIAGNOSTIC_HOST = {
  getCanonicalFileName: (p: string) => p,
  getCurrentDirectory: () => process.cwd(),
  getNewLine: () => '\n',
};
/**
 * Strip leading blank lines from `string`, then remove the indentation of the
 * first remaining line from every line.
 */
function dedent(string: string) {
  const lines = string.split('\n');

  // drop blank lines from the top
  while (lines.length && lines[0].trim() === '') {
    lines.shift();
  }

  if (!lines.length) {
    return '';
  }

  // measure how many leading spaces the first (non-blank) line has
  let width = 0;
  while (lines[0][width] === ' ') {
    width += 1;
  }

  return lines.map((line) => line.slice(width)).join('\n');
}
/**
 * Convert the path of a `.ts` source file to the path of its generated
 * `.d.ts` declaration file.
 *
 * @param path path ending in `.ts`; passing a `.d.ts` path (already a
 *   declaration file) or any non-`.ts` path is a programmer error and throws.
 * @returns the same path with the `.ts` extension replaced by `.d.ts`
 */
function ensureDts(path: string) {
  if (path.endsWith('.d.ts')) {
    throw new Error('path should end with .ts, not .d.ts');
  }
  // guard the blind `.slice(0, -3)` below: without this, a path like
  // `foo.js` would silently become `foo.d.ts`
  if (!path.endsWith('.ts')) {
    throw new Error(`expected path to end with .ts: ${path}`);
  }
  return `${path.slice(0, -3)}.d.ts`;
}
/** Options customizing a single MockCli test execution. */
interface Options {
/* Other files which should be available to the test execution, keyed by path relative to the mock source dir */
otherFiles?: Record<string, string>;
}
/**
 * Test harness which writes a set of mock TypeScript sources into a temp
 * directory, compiles them to .d.ts files, runs the type summarizer over the
 * result, and returns the summarized code, sourcemap and logs for snapshotting.
 */
class MockCli {
/* file contents which will be fed into TypeScript for this test */
public readonly mockFiles: Record<string, string>;
/* directory where mockFiles pretend to be from */
public readonly sourceDir = Path.resolve(TMP_DIR, 'src');
/* directory where we will write .d.ts versions of mockFiles */
public readonly dtsOutputDir = Path.resolve(TMP_DIR, 'dist_dts');
/* directory where output will be written */
public readonly outputDir = Path.resolve(TMP_DIR, 'dts');
/* path where the tsconfig.json file will be written */
public readonly tsconfigPath = Path.resolve(this.sourceDir, 'tsconfig.json');
/* .d.ts file which we will read to discover the types we need to summarize */
public readonly inputPath = ensureDts(Path.resolve(this.dtsOutputDir, 'index.ts'));
/* the location we will write the summarized .d.ts file */
public readonly outputPath = Path.resolve(this.outputDir, Path.basename(this.inputPath));
/* the location we will write the sourcemaps for the summarized .d.ts file */
public readonly mapOutputPath = `${this.outputPath}.map`;
constructor(tsContent: string, options?: Options) {
// `index.ts` is always the entrypoint; otherFiles are laid out beside it
this.mockFiles = {
...options?.otherFiles,
'index.ts': tsContent,
};
}
// Compile the mock sources to declaration files, printing any
// error-category diagnostics from each compiler phase to the console.
private buildDts() {
const program = createTsProject(
loadTsConfigFile(this.tsconfigPath),
Object.keys(this.mockFiles).map((n) => Path.resolve(this.sourceDir, n))
);
this.printDiagnostics(`dts/config`, program.getConfigFileParsingDiagnostics());
this.printDiagnostics(`dts/global`, program.getGlobalDiagnostics());
this.printDiagnostics(`dts/options`, program.getOptionsDiagnostics());
this.printDiagnostics(`dts/semantic`, program.getSemanticDiagnostics());
this.printDiagnostics(`dts/syntactic`, program.getSyntacticDiagnostics());
this.printDiagnostics(`dts/declaration`, program.getDeclarationDiagnostics());
// final `true` = emitOnlyDtsFiles, so only .d.ts (+ maps) are written
const result = program.emit(undefined, undefined, undefined, true);
this.printDiagnostics('dts/results', result.diagnostics);
}
// Print the error-category subset of `diagnostics`, labelled with `type`;
// warnings/suggestions are ignored so tests only fail on real errors.
private printDiagnostics(type: string, diagnostics: readonly ts.Diagnostic[]) {
const errors = diagnostics.filter((d) => d.category === ts.DiagnosticCategory.Error);
if (!errors.length) {
return;
}
const message = ts.formatDiagnosticsWithColorAndContext(errors, DIAGNOSTIC_HOST);
console.error(
`TS Errors (${type}):\n${message
.split('\n')
.map((l) => `  ${l}`)
.join('\n')}`
);
}
// Execute the test: write sources + tsconfig to disk, emit .d.ts files,
// summarize them, then read back the generated code/map/logs.
async run() {
const log = new TestLog('debug');
// wipe out the tmp dir
await Fsp.rm(TMP_DIR, { recursive: true, force: true });
// write mock files to the filesystem
await Promise.all(
Object.entries(this.mockFiles).map(async ([rel, content]) => {
const path = Path.resolve(this.sourceDir, rel);
await Fsp.mkdir(Path.dirname(path), { recursive: true });
await Fsp.writeFile(path, dedent(content));
})
);
// write tsconfig.json to the filesystem
await Fsp.writeFile(
this.tsconfigPath,
JSON.stringify({
include: [`**/*.ts`, `**/*.tsx`],
compilerOptions: {
moduleResolution: 'node',
target: 'es2021',
module: 'CommonJS',
strict: true,
esModuleInterop: true,
allowSyntheticDefaultImports: true,
declaration: true,
emitDeclarationOnly: true,
declarationDir: '../dist_dts',
declarationMap: true,
// prevent loading all @types packages
typeRoots: [],
},
})
);
// convert the source files to .d.ts files
this.buildDts();
// summarize the .d.ts files into the output dir
await summarizePackage(log, {
dtsDir: this.dtsOutputDir,
inputPaths: [this.inputPath],
outputDir: this.outputDir,
repoRelativePackageDir: 'src',
tsconfigPath: this.tsconfigPath,
strictPrinting: false,
});
// return the results
return {
code: await Fsp.readFile(this.outputPath, 'utf8'),
map: JSON.parse(await Fsp.readFile(this.mapOutputPath, 'utf8')),
logs: stripAnsi(log.messages.join('')),
};
}
}
/**
 * Build a MockCli around the given `index.ts` contents and execute it,
 * resolving with the summarized code, its sourcemap, and the captured logs.
 */
export async function run(tsContent: string, options?: Options) {
  const cli = new MockCli(tsContent, options);
  const result = await cli.run();
  return result;
}

View file

@ -0,0 +1,77 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { run } from '../integration_helpers';
// Summarizing a class export should also pull in the private `Db` interface
// it depends on, and preserve doc comments on the class and its members.
it('prints basic class correctly', async () => {
const output = await run(`
/**
* Interface for writin records to a database
*/
interface Db {
write(record: Record<string, unknown>): Promise<void>
}
export class Foo {
/**
* The name of the Foo
*/
public readonly name: string
constructor(name: string) {
this.name = name.toLowerCase()
}
speak() {
alert('hi, my name is ' + this.name)
}
async save(db: Db) {
await db.write({
name: this.name
})
}
}
`);
// summarized .d.ts: method bodies are dropped, signatures + comments remain
expect(output.code).toMatchInlineSnapshot(`
"/**
* Interface for writin records to a database
*/
interface Db {
write(record: Record<string, unknown>): Promise<void>;
}
export class Foo {
/**
* The name of the Foo
*/
readonly name: string;
constructor(name: string);
speak(): void;
save(db: Db): Promise<void>;
}
//# sourceMappingURL=index.d.ts.map"
`);
expect(output.map).toMatchInlineSnapshot(`
Object {
"file": "index.d.ts",
"mappings": ";;;UAGU,E;;;aAIG,G;;;;WAIK,I;;EAKhB,K;EAIM,I",
"names": Array [],
"sourceRoot": "../../../src",
"sources": Array [
"index.ts",
],
"version": 3,
}
`);
// global lib types (Record/Promise) must not be inlined into the summary
expect(output.logs).toMatchInlineSnapshot(`
"debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/index.d.ts
debug Ignoring 1 global declarations for \\"Record\\"
debug Ignoring 5 global declarations for \\"Promise\\"
"
`);
});

View file

@ -0,0 +1,81 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { run } from '../integration_helpers';
// A function whose signature references a class from another module should
// cause that class to be inlined (un-exported) ahead of the function.
it('prints the function declaration, including comments', async () => {
const result = await run(
`
import { Bar } from './bar';
/**
* Convert a Bar to a string
*/
export function foo<X>(
/**
* Important comment
*/
name: Bar<X>
) {
return name.toString();
}
`,
{
otherFiles: {
'bar.ts': `
export class Bar<T extends { toString(): string }> {
constructor(
private value: T
){}
toString() {
return this.value.toString()
}
}
`,
},
}
);
expect(result.code).toMatchInlineSnapshot(`
"class Bar<T extends {
toString(): string;
}> {
private value;
constructor(value: T);
toString(): string;
}
/**
* Convert a Bar to a string
*/
export function foo<X>(
/**
* Important comment
*/
name: Bar<X>): string;
//# sourceMappingURL=index.d.ts.map"
`);
// sources list both files since Bar was pulled in from bar.ts
expect(result.map).toMatchInlineSnapshot(`
Object {
"file": "index.d.ts",
"mappings": "MAAa,G;;;UAED,K;;EAGV,Q;;;;;gBCAc,G",
"names": Array [],
"sourceRoot": "../../../src",
"sources": Array [
"bar.ts",
"index.ts",
],
"version": 3,
}
`);
expect(result.logs).toMatchInlineSnapshot(`
"debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/bar.d.ts
debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/index.d.ts
"
`);
});

View file

@ -0,0 +1,90 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { run } from '../integration_helpers';
// Fake node_modules packages: imports from these should be kept as imports
// in the summary rather than being inlined like local files.
const nodeModules = {
'node_modules/foo/index.ts': `
export class Foo {
render() {
return 'hello'
}
}
`,
'node_modules/bar/index.ts': `
export default class Bar {
render() {
return 'hello'
}
}
`,
};
it('output type links to named import from node modules', async () => {
const output = await run(
`
import { Foo } from 'foo'
export type ValidName = string | Foo
`,
{ otherFiles: nodeModules }
);
// the `foo` import survives untouched in the summarized output
expect(output.code).toMatchInlineSnapshot(`
"import { Foo } from 'foo';
export type ValidName = string | Foo
//# sourceMappingURL=index.d.ts.map"
`);
expect(output.map).toMatchInlineSnapshot(`
Object {
"file": "index.d.ts",
"mappings": ";YACY,S",
"names": Array [],
"sourceRoot": "../../../src",
"sources": Array [
"index.ts",
],
"version": 3,
}
`);
expect(output.logs).toMatchInlineSnapshot(`
"debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/index.d.ts
"
`);
});
it('output type links to default import from node modules', async () => {
const output = await run(
`
import Bar from 'bar'
export type ValidName = string | Bar
`,
{ otherFiles: nodeModules }
);
// NOTE: the default import is re-printed as a named import of `Bar`
expect(output.code).toMatchInlineSnapshot(`
"import { Bar } from 'bar';
export type ValidName = string | Bar
//# sourceMappingURL=index.d.ts.map"
`);
expect(output.map).toMatchInlineSnapshot(`
Object {
"file": "index.d.ts",
"mappings": ";YACY,S",
"names": Array [],
"sourceRoot": "../../../src",
"sources": Array [
"index.ts",
],
"version": 3,
}
`);
expect(output.logs).toMatchInlineSnapshot(`
"debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/index.d.ts
"
`);
});

View file

@ -0,0 +1,62 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { run } from '../integration_helpers';
// An exported generic interface should be reproduced whole, with doc
// comments on the interface and each member preserved.
it('prints the whole interface, including comments', async () => {
const result = await run(`
/**
* This is an interface
*/
export interface Foo<Bar> {
/**
* method
*/
name(): string
/**
* hello
*/
close(): Promise<void>
}
`);
expect(result.code).toMatchInlineSnapshot(`
"/**
* This is an interface
*/
export interface Foo<Bar> {
/**
* method
*/
name(): string;
/**
* hello
*/
close(): Promise<void>;
}
//# sourceMappingURL=index.d.ts.map"
`);
expect(result.map).toMatchInlineSnapshot(`
Object {
"file": "index.d.ts",
"mappings": ";;;iBAGiB,G",
"names": Array [],
"sourceRoot": "../../../src",
"sources": Array [
"index.ts",
],
"version": 3,
}
`);
// global Promise type must be ignored, not inlined
expect(result.logs).toMatchInlineSnapshot(`
"debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/index.d.ts
debug Ignoring 5 global declarations for \\"Promise\\"
"
`);
});

View file

@ -0,0 +1,71 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { run } from '../integration_helpers';
// Triple-slash lib/types references from every file that contributes to the
// summary should be hoisted, deduped, to the top of the output.
it('collects references from source files which contribute to result', async () => {
const result = await run(
`
/// <reference lib="es2015" />
export type PromiseOfString = Promise<'string'>
export * from './files'
`,
{
otherFiles: {
'files/index.ts': `
/// <reference lib="dom" />
export type MySymbol = Symbol & { __tag: 'MySymbol' }
export * from './foo'
`,
'files/foo.ts': `
/// <reference types="react" />
interface Props {}
export type MyComponent = React.Component<Props>
`,
},
}
);
expect(result.code).toMatchInlineSnapshot(`
"/// <reference lib=\\"es2015\\" />
/// <reference lib=\\"dom\\" />
/// <reference types=\\"react\\" />
export type PromiseOfString = Promise<'string'>
export type MySymbol = Symbol & {
__tag: 'MySymbol';
}
interface Props {
}
export type MyComponent = React.Component<Props>
//# sourceMappingURL=index.d.ts.map"
`);
expect(result.map).toMatchInlineSnapshot(`
Object {
"file": "index.d.ts",
"mappings": ";;;YACY,e;YCAA,Q;;;UCAF,K;;YACE,W",
"names": Array [],
"sourceRoot": "../../../src",
"sources": Array [
"index.ts",
"files/index.ts",
"files/foo.ts",
],
"version": 3,
}
`);
expect(result.logs).toMatchInlineSnapshot(`
"debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/files/foo.d.ts
debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/files/index.d.ts
debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/index.d.ts
debug Ignoring 5 global declarations for \\"Promise\\"
debug Ignoring 4 global declarations for \\"Symbol\\"
debug Ignoring 2 global declarations for \\"Component\\"
debug Ignoring 1 global declarations for \\"React\\"
"
`);
});

View file

@ -0,0 +1,42 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { run } from '../integration_helpers';
// Only the exported alias should appear in the summary; the un-exported
// runtime code (`hello` and its call) must be dropped entirely.
it('prints basic type alias', async () => {
const output = await run(`
export type Name = 'foo' | string
function hello(name: Name) {
console.log('hello', name)
}
hello('john')
`);
expect(output.code).toMatchInlineSnapshot(`
"export type Name = 'foo' | string
//# sourceMappingURL=index.d.ts.map"
`);
expect(output.map).toMatchInlineSnapshot(`
Object {
"file": "index.d.ts",
"mappings": "YAAY,I",
"names": Array [],
"sourceRoot": "../../../src",
"sources": Array [
"index.ts",
],
"version": 3,
}
`);
expect(output.logs).toMatchInlineSnapshot(`
"debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/index.d.ts
"
`);
});

View file

@ -0,0 +1,68 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { run } from '../integration_helpers';
// Multi-declarator `export var` statements should be split into one
// declaration per variable, each keeping its own doc comment; initializers
// are dropped and `let` bindings get their inferred type.
it('prints basic variable exports with sourcemaps', async () => {
const output = await run(`
/**
* What is a type
*/
type Type = 'bar' | 'baz'
/** some comment */
export const bar: Type = 'bar'
export var
/**
* checkout bar
*/
baz: Type = 'baz',
/**
* this is foo
*/
foo: Type = 'bar'
export let types = [bar, baz, foo]
`);
expect(output.code).toMatchInlineSnapshot(`
"/**
* What is a type
*/
type Type = 'bar' | 'baz'
/** some comment */
export const bar: Type;
/**
* checkout bar
*/
export var baz: Type;
/**
* this is foo
*/
export var foo: Type;
export let types: (\\"bar\\" | \\"baz\\")[];
//# sourceMappingURL=index.d.ts.map"
`);
expect(output.map).toMatchInlineSnapshot(`
Object {
"file": "index.d.ts",
"mappings": ";;;KAGK,I;;aAGQ,G;;;;WAMX,G;;;;WAIA,G;WAES,K",
"names": Array [],
"sourceRoot": "../../../src",
"sources": Array [
"index.ts",
],
"version": 3,
}
`);
expect(output.logs).toMatchInlineSnapshot(`
"debug loaded sourcemap for packages/kbn-type-summarizer/tests/__tmp__/dist_dts/index.d.ts
"
`);
});

View file

@ -0,0 +1,17 @@
{
"extends": "../../tsconfig.bazel.json",
"compilerOptions": {
"declaration": true,
"declarationMap": true,
"emitDeclarationOnly": false,
"outDir": "target_types",
"types": [
"jest",
"node"
]
},
"include": [
"src/**/*",
"tests/**/*"
]
}

View file

@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// Must run first: make Node resolve modules through symlinks (bazel layout).
require('../src/setup_node_env/ensure_node_preserve_symlinks');
// Map stack traces through sourcemaps so errors point at .ts sources.
require('source-map-support/register');
// Hand off to the compiled bazel CLI entry point of the summarizer.
require('@kbn/type-summarizer/target_node/bazel_cli');

View file

@ -12,7 +12,7 @@ Please do not import from any other files when looking to use a custom rule
load("//src/dev/bazel:jsts_transpiler.bzl", _jsts_transpiler = "jsts_transpiler")
load("//src/dev/bazel:pkg_npm.bzl", _pkg_npm = "pkg_npm")
load("//src/dev/bazel/pkg_npm_types:index.bzl", _pkg_npm_types = "pkg_npm_types")
load("//src/dev/bazel:pkg_npm_types.bzl", _pkg_npm_types = "pkg_npm_types")
load("//src/dev/bazel:ts_project.bzl", _ts_project = "ts_project")
jsts_transpiler = _jsts_transpiler

View file

@ -72,32 +72,22 @@ def _pkg_npm_types_impl(ctx):
inputs = ctx.files.srcs[:]
inputs.extend(tsconfig_inputs)
inputs.extend(deps_inputs)
inputs.append(ctx.file._generated_package_json_template)
# output dir declaration
package_path = ctx.label.package
package_dir = ctx.actions.declare_directory(ctx.label.name)
outputs = [package_dir]
# gathering template args
template_args = [
"NAME", _get_type_package_name(ctx.attr.package_name)
]
# layout api extractor arguments
extractor_args = ctx.actions.args()
## general args layout
### [0] = base output dir
### [1] = generated package json template input file path
### [2] = stringified template args
### [3] = tsconfig input file path
### [4] = entry point from provided types to summarise
extractor_args.add(package_dir.path)
extractor_args.add(ctx.file._generated_package_json_template.path)
extractor_args.add_joined(template_args, join_with = ",", omit_if_empty = False)
extractor_args.add(tsconfig_inputs[0])
extractor_args.add(_calculate_entrypoint_path(ctx))
extractor_args.add(struct(
packageName = ctx.attr.package_name,
outputDir = package_dir.path,
buildFilePath = ctx.build_file_path,
tsconfigPath = tsconfig_inputs[0].path,
inputPath = _calculate_entrypoint_path(ctx),
).to_json())
run_node(
ctx,
@ -141,7 +131,9 @@ pkg_npm_types = rule(
doc = """Entrypoint name of the types files group to summarise""",
default = "index.d.ts",
),
"package_name": attr.string(),
"package_name": attr.string(
mandatory = True
),
"srcs": attr.label_list(
doc = """Files inside this directory which are inputs for the types to summarise.""",
allow_files = True,
@ -151,11 +143,7 @@ pkg_npm_types = rule(
doc = "Target that executes the npm types package assembler binary",
executable = True,
cfg = "host",
default = Label("//src/dev/bazel/pkg_npm_types:_packager"),
),
"_generated_package_json_template": attr.label(
allow_single_file = True,
default = "package_json.mustache",
default = Label("//packages/kbn-type-summarizer:bazel-cli"),
),
},
)

View file

@ -1,28 +0,0 @@
# Targets in this package are visible repo-wide by default.
package(default_visibility = ["//visibility:public"])
load("@build_bazel_rules_nodejs//internal/node:node.bzl", "nodejs_binary")
# All runtime files needed by the packager binary below.
filegroup(
name = "packager_all_files",
srcs = glob([
"packager/*",
]),
)
# Template consumed by the packager when rendering package.json.
exports_files(
[
"package_json.mustache",
],
visibility = ["//visibility:public"]
)
# Node binary that assembles an npm types package via api-extractor.
nodejs_binary(
name = "_packager",
data = [
"@npm//@bazel/typescript",
"@npm//@microsoft/api-extractor",
"@npm//mustache",
":packager_all_files"
],
entry_point = ":packager/index.js",
)

View file

@ -1,15 +0,0 @@
#
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License
# 2.0 and the Server Side Public License, v 1; you may not use this file except
# in compliance with, at your election, the Elastic License 2.0 or the Server
# Side Public License, v 1.
#
"""Public API interface for pkg_npm_types rule.
Please do not import from any other files when looking to this rule
"""
load(":pkg_npm_types.bzl", _pkg_npm_types = "pkg_npm_types")
pkg_npm_types = _pkg_npm_types

View file

@ -1,8 +0,0 @@
{
"name": "{{{NAME}}}",
"description": "Generated by Bazel",
"types": "./index.d.ts",
"private": true,
"license": "MIT",
"version": "1.1.0"
}

View file

@ -1,90 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
const { format, parseTsconfig } = require('@bazel/typescript');
const { Extractor, ExtractorConfig } = require('@microsoft/api-extractor');
const fs = require('fs');
const path = require('path');
/**
 * Run @microsoft/api-extractor over `entryPoint` using the compiler options
 * from `tsConfig`, optionally rolling the result up into `dtsBundleOut` and/or
 * producing an API report in `apiReviewFolder`.
 *
 * Returns 0 on success and 1 on failure, for use as a process exit code;
 * extractor errors themselves are printed by api-extractor's own logger.
 */
function createApiExtraction(
tsConfig,
entryPoint,
dtsBundleOut,
apiReviewFolder,
acceptApiUpdates = false
) {
const [parsedConfig, errors] = parseTsconfig(tsConfig);
if (errors && errors.length) {
console.error(format('', errors));
return 1;
}
// api-extractor insists on a package.json next to the entry point; write a
// dummy one if the directory doesn't already have one
const pkgJson = path.resolve(path.dirname(entryPoint), 'package.json');
if (!fs.existsSync(pkgJson)) {
fs.writeFileSync(
pkgJson,
JSON.stringify({
name: 'GENERATED-BY-BAZEL',
description: 'This is a dummy package.json as API Extractor always requires one.',
types: './index.d.ts',
private: true,
license: 'SSPL-1.0 OR Elastic License 2.0',
version: '1.0.0',
})
);
}
// API extractor doesn't always support the version of TypeScript used in the repo
// example: at the moment it is not compatible with 3.2
// to use the internal TypeScript we shall not create a program but rather pass a parsed tsConfig.
const parsedTsConfig = parsedConfig.config;
const extractorOptions = {
localBuild: acceptApiUpdates,
};
const configObject = {
compiler: {
overrideTsconfig: parsedTsConfig,
},
projectFolder: path.resolve(path.dirname(tsConfig)),
mainEntryPointFilePath: path.resolve(entryPoint),
apiReport: {
enabled: !!apiReviewFolder,
// TODO(alan-agius4): remove this folder name when the below issue is solved upstream
// See: https://github.com/microsoft/web-build-tools/issues/1470
reportFileName: (apiReviewFolder && path.resolve(apiReviewFolder)) || 'invalid',
},
docModel: {
enabled: false,
},
dtsRollup: {
enabled: !!dtsBundleOut,
untrimmedFilePath: dtsBundleOut && path.resolve(dtsBundleOut),
},
tsdocMetadata: {
enabled: false,
},
};
const options = {
configObject,
packageJson: undefined,
packageJsonFullPath: pkgJson,
configObjectFullPath: undefined,
};
const extractorConfig = ExtractorConfig.prepare(options);
const { succeeded } = Extractor.invoke(extractorConfig, extractorOptions);
// API extractor errors are emitted by its logger.
return succeeded ? 0 : 1;
}
module.exports.createApiExtraction = createApiExtraction;

View file

@ -1,43 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
const fs = require('fs');
const Mustache = require('mustache');
const path = require('path');
/**
 * Render the package.json mustache template with the provided template args
 * and write the result to `${outputBasePath}/package.json`.
 *
 * @param {string} outputBasePath directory the rendered package.json is written to
 * @param {string} packageJsonTemplatePath path to the mustache template file
 * @param {string[]} rawPackageJsonTemplateArgs flat list of alternating
 *   key/value strings, e.g. ['NAME', '@types/foo', 'VERSION', '1.0.0']
 * @returns {number} 0 on success, 1 on failure (used as a process exit code)
 */
function generatePackageJson(outputBasePath, packageJsonTemplatePath, rawPackageJsonTemplateArgs) {
  // Group the flat key/value list into [key, value] tuples.
  // NOTE: the previous reducer did `return a.push([v])`, which returns the new
  // array *length* (a number) rather than the accumulator, so it crashed as
  // soon as more than one tuple was supplied.
  const packageJsonTemplateArgsInTuples = rawPackageJsonTemplateArgs.reduce((a, v) => {
    const lastTuple = a[a.length - 1];
    if (lastTuple && lastTuple.length < 2) {
      lastTuple.push(v);
    } else {
      a.push([v]);
    }
    return a;
  }, []);
  const packageJsonTemplateArgs = Object.fromEntries(new Map(packageJsonTemplateArgsInTuples));

  try {
    const template = fs.readFileSync(packageJsonTemplatePath);
    const renderedTemplate = Mustache.render(template.toString(), packageJsonTemplateArgs);
    fs.writeFileSync(path.resolve(outputBasePath, 'package.json'), renderedTemplate);
  } catch (e) {
    // best-effort: report the failure and signal it via the return code
    console.error(e);
    return 1;
  }
  return 0;
}
module.exports.generatePackageJson = generatePackageJson;

View file

@ -1,46 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
const { createApiExtraction } = require('./create_api_extraction');
const { generatePackageJson } = require('./generate_package_json');
const path = require('path');
// Flip to true locally to dump the raw CLI invocation for troubleshooting.
const DEBUG = false;
// Only run when executed directly (bazel invokes this file as a binary).
if (require.main === module) {
if (DEBUG) {
console.error(`
pkg_npm_types packager: running with
cwd: ${process.cwd()}
argv:
${process.argv.join('\n ')}
`);
}
// layout args — positional CLI arguments supplied by the bazel rule
const [
outputBasePath,
packageJsonTemplatePath,
stringifiedPackageJsonTemplateArgs,
tsConfig,
entryPoint,
] = process.argv.slice(2);
const dtsBundleOutput = path.resolve(outputBasePath, 'index.d.ts');
// generate pkg json output
const generatePackageJsonRValue = generatePackageJson(
outputBasePath,
packageJsonTemplatePath,
stringifiedPackageJsonTemplateArgs.split(',')
);
// create api extraction output
const createApiExtractionRValue = createApiExtraction(tsConfig, entryPoint, dtsBundleOutput);
// setup correct exit code — non-zero if either step failed
process.exitCode = generatePackageJsonRValue || createApiExtractionRValue;
}

View file

@ -82,4 +82,5 @@ export const PROJECTS = [
...findProjects('test/plugin_functional/plugins/*/tsconfig.json'),
...findProjects('test/interpreter_functional/plugins/*/tsconfig.json'),
...findProjects('test/server_integration/__fixtures__/plugins/*/tsconfig.json'),
...findProjects('packages/kbn-type-summarizer/tests/tsconfig.json'),
];

View file

@ -4047,6 +4047,10 @@
version "0.0.0"
uid ""
"@kbn/type-summarizer@link:bazel-bin/packages/kbn-type-summarizer":
version "0.0.0"
uid ""
"@kbn/typed-react-router-config@link:bazel-bin/packages/kbn-typed-react-router-config":
version "0.0.0"
uid ""