[Code] using a statically linked nodegit (#27189)

* Patch native modules for nodegit during the Kibana build

* Bump the sinon version

* Avoid using the network during git tests

* Add a clone option (see the sketch below the commit metadata)

* Move nodegit-related tests from jest to mocha (#27366)
Authored by Yulong on 2018-12-18 03:47:02 +08:00; committed by Fuyao Zhao
Parent: 6aa1cc4fee
Commit: cb5ab56bd5
13 changed files with 728 additions and 536 deletions
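
The "clone option" mentioned above shows up in the reworked tests further down: nodegit clones now pass a fetchOpts.callbacks.certificateCheck callback. Pulled out on its own, it looks like the sketch below (adapted from the test code in this commit; returning 1 tells libgit2 to accept the server certificate during fetch).

import { CloneOptions } from 'nodegit';

const opts: CloneOptions = {
  fetchOpts: {
    callbacks: {
      // Returning 1 accepts the server certificate so HTTPS fetches are not aborted.
      certificateCheck: () => 1,
    },
  },
};
// Usage: Git.Clone.clone(url, localPath, opts) resolves to the cloned Repository.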


@ -371,7 +371,7 @@
"mocha": "3.3.0",
"murmurhash3js": "3.0.1",
"mutation-observer": "^1.0.3",
"nock": "8.0.0",
"nock": "10.0.4",
"node-sass": "^4.9.4",
"normalize-path": "^3.0.0",
"pixelmatch": "4.0.2",
@ -381,7 +381,7 @@
"proxyquire": "1.7.11",
"regenerate": "^1.4.0",
"simple-git": "1.37.0",
"sinon": "^5.0.7",
"sinon": "v7.2.2",
"strip-ansi": "^3.0.1",
"supertest": "^3.1.0",
"supertest-as-promised": "^4.0.2",


@ -43,6 +43,7 @@ import {
ExtractNodeBuildsTask,
InstallDependenciesTask,
OptimizeBuildTask,
PatchNativeModulesTask,
RemovePackageJsonDepsTask,
RemoveWorkspacesTask,
TranspileBabelTask,
@ -128,6 +129,7 @@ export async function buildDistributables(options) {
* directories and perform platform-specific steps
*/
await run(CreateArchivesSourcesTask);
await run(PatchNativeModulesTask);
await run(CleanExtraBinScriptsTask);
await run(CleanExtraBrowsersTask);
await run(CleanNodeBuildsTask);


@ -37,3 +37,4 @@ export * from './transpile_typescript_task';
export * from './transpile_scss_task';
export * from './verify_env_task';
export * from './write_sha_sums_task';
export * from './patch_native_modules_task';


@ -0,0 +1,106 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { scanCopy, untar, deleteAll } from '../lib';
import { writeSync, openSync, existsSync, unlinkSync } from 'fs';
import binaryInfo from 'nodegit/lib/utils/binary_info';
import wreck from 'wreck';
import mkdirp from 'mkdirp';
import { dirname, join, basename } from 'path';
async function download(url, destination, log) {
const response = await wreck.request('GET', url);
if (response.statusCode !== 200) {
throw new Error(
`Unexpected status code ${response.statusCode} when downloading ${url}`
);
}
mkdirp.sync(dirname(destination));
const fileHandle = openSync(destination, 'w');
await new Promise((resolve, reject) => {
response.on('data', chunk => {
writeSync(fileHandle, chunk);
});
response.on('error', (err) => {
if (existsSync(destination)) {
// remove the unfinished file
unlinkSync(destination);
}
reject(err);
});
response.on('end', () => {
log.debug('Downloaded ', url);
resolve();
});
});
}
async function downloadAndExtractTarball(url, dest, log, retry) {
try {
await download(url, dest, log);
const extractDir = join(dirname(dest), basename(dest, '.tar.gz'));
await untar(dest, extractDir, {
strip: 1
});
return extractDir;
} catch (e) {
if (retry > 0) {
// Return the retried result so callers still receive the extract dir after a retry.
return await downloadAndExtractTarball(url, dest, log, retry - 1);
} else {
throw e;
}
}
}
async function patchNodeGit(config, log, build, platform) {
const plat = platform.isWindows() ? 'win32' : platform.getName();
const arch = platform.getNodeArch().split('-')[1];
const info = binaryInfo(plat, arch);
const downloadUrl = info.hosted_tarball;
const packageName = info.package_name;
const downloadPath = config.resolveFromRepo('.nodegit_binaries', packageName);
const extractDir = await downloadAndExtractTarball(downloadUrl, downloadPath, log, 3);
const destination = build.resolvePathForPlatform(platform, 'node_modules', 'nodegit', 'build', 'Release');
log.debug('Replacing nodegit binaries from ', extractDir);
await deleteAll([destination], log);
await scanCopy({
source: extractDir,
destination: destination,
time: new Date(),
});
}
async function cleanNodeGitPatchDir(config, log) {
await deleteAll([config.resolveFromRepo('.nodegit_binaries')], log);
}
export const PatchNativeModulesTask = {
description: 'Patching platform-specific native modules directories',
async run(config, log, build) {
await cleanNodeGitPatchDir(config, log);
await Promise.all(config.getTargetPlatforms().map(async platform => {
await patchNodeGit(config, log, build, platform);
}));
}
};
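
For context, the task relies on nodegit's own binary_info helper to decide which prebuilt tarball to fetch for each target platform. A minimal sketch of that lookup follows; the concrete package name and URL depend on the nodegit version pinned in package.json, so the commented values are only illustrative.

// Illustrative only: the fields come from nodegit/lib/utils/binary_info and their
// values depend on the pinned nodegit version and its binary configuration.
import binaryInfo from 'nodegit/lib/utils/binary_info';

const info = binaryInfo('linux', 'x64');
// info.package_name   -> file name of the prebuilt binary tarball for this platform/arch
// info.hosted_tarball -> URL the tarball is downloaded from (used by the task above)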


@ -236,7 +236,7 @@
"moment-timezone": "^0.5.14",
"monaco-editor": "^0.14.3",
"ngreact": "^0.5.1",
"nodegit": "git+https://github.com/elastic/nodegit.git#6d5c2050426716127f602d2b07d885fef9b8dadb",
"nodegit": "git+https://github.com/elastic/nodegit.git#v0.24.0-alpha.6",
"nodemailer": "^4.6.4",
"object-path-immutable": "^0.5.3",
"oppsy": "^2.0.0",


@ -0,0 +1,38 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import assert from 'assert';
import { execSync } from 'child_process';
import fs from 'fs';
import os from 'os';
import path from 'path';
import rimraf from 'rimraf';
import { getDefaultBranch } from '../git_operations';
describe('git_operations', () => {
it('get default branch from a non master repo', async () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test_git'));
// create a non-master default branch using git commands
const shell = `
git init
git add 'run.sh'
git commit -m 'init commit'
git branch -m trunk
`;
fs.writeFileSync(path.join(tmpDir, 'run.sh'), shell, 'utf-8');
execSync('sh ./run.sh', {
cwd: tmpDir,
});
try {
const defaultBranch = await getDefaultBranch(tmpDir);
assert.strictEqual(defaultBranch, 'trunk');
} finally {
rimraf.sync(tmpDir);
}
return '';
});
});
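
This test no longer touches the network: it creates a throwaway repository on disk, renames its branch to trunk, and asserts on the result. For reference, a minimal sketch of what a getDefaultBranch helper could look like on top of nodegit is shown below; it is hypothetical, and the real implementation in ../git_operations may differ.

// Hypothetical sketch only; see ../git_operations for the real implementation.
import Git from 'nodegit';

export async function getDefaultBranch(repoPath: string): Promise<string> {
  const repo = await Git.Repository.open(repoPath);
  const head = await repo.head(); // Reference pointing at the current HEAD branch
  return head.shorthand();        // e.g. 'trunk' rather than 'refs/heads/trunk'
}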


@ -0,0 +1,275 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import assert from 'assert';
import fs from 'fs';
import Git, { CloneOptions } from 'nodegit';
import path from 'path';
import rimraf from 'rimraf';
import sinon from 'sinon';
import { LspIndexer } from '../indexer/lsp_indexer';
import { RepositoryGitStatusReservedField } from '../indexer/schema';
import { AnyObject, EsClient } from '../lib/esqueue';
import { Log } from '../log';
import { InstallManager } from '../lsp/install_manager';
import { LspService } from '../lsp/lsp_service';
import { ServerOptions } from '../server_options';
import { ConsoleLoggerFactory } from '../utils/console_logger_factory';
const log: Log = (new ConsoleLoggerFactory().getLogger(['test']) as any) as Log;
const emptyAsyncFunc = async (_: AnyObject): Promise<any> => {
Promise.resolve({});
};
const esClient = {
bulk: emptyAsyncFunc,
get: emptyAsyncFunc,
deleteByQuery: emptyAsyncFunc,
indices: {
existsAlias: emptyAsyncFunc,
create: emptyAsyncFunc,
putAlias: emptyAsyncFunc,
},
};
function prepareProject(url: string, p: string) {
const opts: CloneOptions = {
fetchOpts: {
callbacks: {
certificateCheck: () => 1,
},
},
};
return new Promise(resolve => {
if (!fs.existsSync(p)) {
rimraf(p, error => {
Git.Clone.clone(url, p, opts).then(repo => {
resolve(repo);
});
});
} else {
resolve();
}
});
}
const repoUri = 'github.com/Microsoft/TypeScript-Node-Starter';
const options = {
enabled: true,
queueIndex: '.code-worker-queue',
queueTimeout: 60 * 60 * 1000, // 1 hour by default
updateFreqencyMs: 5 * 60 * 1000, // 5 minutes by default
indexFrequencyMs: 24 * 60 * 60 * 1000, // 1 day by default
lspRequestTimeoutMs: 5 * 60, // timeout a request over 30s
repos: [],
maxWorkspace: 5, // max workspace folder for each language server
isAdmin: true, // If we show the admin buttons
disableScheduler: true, // Temp option to disable all schedulers.
};
const config = {
get(key: string) {
if (key === 'path.data') {
return '/tmp/test';
}
},
};
const serverOptions = new ServerOptions(options, config);
function cleanWorkspace() {
return new Promise(resolve => {
rimraf(serverOptions.workspacePath, resolve);
});
}
function setupEsClientSpy() {
// Mock a git status of the repo indicating that the repo is fully cloned already.
const getSpy = sinon.fake.returns(
Promise.resolve({
_source: {
[RepositoryGitStatusReservedField]: {
uri: 'github.com/Microsoft/TypeScript-Node-Starter',
progress: 100,
timestamp: new Date(),
cloneProgress: {
isCloned: true,
},
},
},
})
);
const existsAliasSpy = sinon.fake.returns(false);
const createSpy = sinon.spy();
const putAliasSpy = sinon.spy();
const deleteByQuerySpy = sinon.spy();
const bulkSpy = sinon.spy();
esClient.bulk = bulkSpy;
esClient.indices.existsAlias = existsAliasSpy;
esClient.indices.create = createSpy;
esClient.indices.putAlias = putAliasSpy;
esClient.get = getSpy;
esClient.deleteByQuery = deleteByQuerySpy;
return {
getSpy,
existsAliasSpy,
createSpy,
putAliasSpy,
deleteByQuerySpy,
bulkSpy,
};
}
function setupLsServiceSendRequestSpy(): sinon.SinonSpy {
return sinon.fake.returns(
Promise.resolve({
result: [
{
// 1 mock symbol for each file
symbols: [
{
symbolInformation: {
name: 'mocksymbolname',
},
},
],
// 1 mock reference for each file
references: [{}],
},
],
})
);
}
describe('lsp_indexer', () => {
// @ts-ignore
before(async () => {
return new Promise(resolve => {
rimraf(serverOptions.repoPath, resolve);
});
});
beforeEach(async function() {
// @ts-ignore
this.timeout(200000);
return await prepareProject(
'https://github.com/Microsoft/TypeScript-Node-Starter.git',
path.join(serverOptions.repoPath, repoUri)
);
});
// @ts-ignore
after(() => {
return cleanWorkspace();
});
afterEach(() => {
sinon.restore();
});
it('Normal LSP index process.', async () => {
// Setup the esClient spies
const {
getSpy,
existsAliasSpy,
createSpy,
putAliasSpy,
deleteByQuerySpy,
bulkSpy,
} = setupEsClientSpy();
const lspservice = new LspService(
'127.0.0.1',
serverOptions,
esClient as EsClient,
{} as InstallManager,
new ConsoleLoggerFactory()
);
lspservice.sendRequest = setupLsServiceSendRequestSpy();
const indexer = new LspIndexer(
'github.com/Microsoft/TypeScript-Node-Starter',
'master',
lspservice,
serverOptions,
esClient as EsClient,
log as Log
);
await indexer.start();
// Expect EsClient get called once to get the repo git status.
assert.ok(getSpy.calledOnce);
// Expect EsClient deleteByQuery called 3 times for repository cleaning before
// the index for document, symbol and reference, respectively.
assert.strictEqual(deleteByQuerySpy.callCount, 3);
// Ditto for index and alias creation
assert.strictEqual(existsAliasSpy.callCount, 3);
assert.strictEqual(createSpy.callCount, 3);
assert.strictEqual(putAliasSpy.callCount, 3);
// There are 22 files in the repo. 1 file + 1 symbol + 1 reference = 3 objects to
// index for each file. Total doc indexed should be 3 * 22 = 66, which can be
// fitted into a single batch index.
assert.ok(bulkSpy.calledOnce);
assert.strictEqual(bulkSpy.getCall(0).args[0].body.length, 66 * 2);
// @ts-ignore
}).timeout(20000);
it('Cancel LSP index process.', async () => {
// Setup the esClient spies
const {
getSpy,
existsAliasSpy,
createSpy,
putAliasSpy,
deleteByQuerySpy,
bulkSpy,
} = setupEsClientSpy();
const lspservice = new LspService(
'127.0.0.1',
serverOptions,
esClient as EsClient,
{} as InstallManager,
new ConsoleLoggerFactory()
);
lspservice.sendRequest = setupLsServiceSendRequestSpy();
const indexer = new LspIndexer(
'github.com/Microsoft/TypeScript-Node-Starter',
'master',
lspservice,
serverOptions,
esClient as EsClient,
log as Log
);
// Cancel the indexer before start.
indexer.cancel();
await indexer.start();
// Expect EsClient get called once to get the repo git status.
assert.ok(getSpy.calledOnce);
// Expect EsClient deleteByQuery called 3 times for repository cleaning before
// the index for document, symbol and reference, respectively.
assert.strictEqual(deleteByQuerySpy.callCount, 3);
// Ditto for index and alias creation
assert.strictEqual(existsAliasSpy.callCount, 3);
assert.strictEqual(createSpy.callCount, 3);
assert.strictEqual(putAliasSpy.callCount, 3);
// Because the indexer is already cancelled at the beginning, 0 docs should be
// indexed and thus bulk won't be called.
assert.ok(bulkSpy.notCalled);
});
// @ts-ignore
}).timeout(20000);


@ -0,0 +1,205 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/* tslint:disable */
import fs from 'fs';
import Git from 'nodegit';
// import rimraf from 'rimraf';
import sinon from 'sinon';
import path from 'path';
import mkdirp from 'mkdirp';
import { LspService } from "../lsp/lsp_service";
import { ServerOptions } from "../server_options";
import { ConsoleLoggerFactory } from "../utils/console_logger_factory";
import { RepositoryGitStatusReservedField, RepositoryTypeName } from '../indexer/schema';
import { InstallManager } from "../lsp/install_manager";
import * as os from "os";
import assert from 'assert';
const filename = 'hello.ts';
describe('lsp_service tests', () => {
async function prepareProject(repoPath: string) {
mkdirp.sync(repoPath);
const repo = await Git.Repository.init(repoPath, 0);
const helloContent = "console.log('hello world');";
fs.writeFileSync(path.join(repo.workdir(), filename), helloContent, 'utf8');
const index = await repo.refreshIndex();
await index.addByPath(filename);
index.write();
const treeId = await index.writeTree();
const committer = Git.Signature.create("tester",
"test@test.com", Date.now() / 1000, 60);
const commit = await repo.createCommit("HEAD", committer, committer, "commit for test", treeId, []);
console.log(`created commit ${commit.tostrS()}`);
return repo;
}
const options = {
enabled: true,
queueIndex: '.code-worker-queue',
queueTimeout: 60 * 60 * 1000, // 1 hour by default
updateFreqencyMs: 5 * 60 * 1000, // 5 minutes by default
indexFrequencyMs: 24 * 60 * 60 * 1000, // 1 day by default
lspRequestTimeoutMs: 5 * 60, // timeout a request over 30s
repos: [],
maxWorkspace: 5, // max workspace folder for each language server
isAdmin: true, // If we show the admin buttons
disableScheduler: true, // Temp option to disable all schedulers.
};
const tmpDataPath = fs.mkdtempSync(path.join(os.tmpdir(), 'code_test'));
console.log(`tmp data path is ${tmpDataPath}`);
const config = {
get(key: string) {
if (key === 'path.data') {
return tmpDataPath;
}
}
};
const serverOptions = new ServerOptions(options, config);
const installManager = new InstallManager(serverOptions);
function mockEsClient(): any {
const api = {
get: function (params: any) {
const {type} = params;
if (type === RepositoryTypeName) {
return {
_source: {
[RepositoryGitStatusReservedField]: {
cloneProgress: {
isCloned: true
}
}
}
}
}
},
};
return api;
}
const repoUri = 'github.com/test/test_repo';
// @ts-ignore
before(async () => {
await prepareProject(
path.join(serverOptions.repoPath, repoUri)
);
});
function comparePath(pathA: string, pathB: string) {
const pa = fs.realpathSync(pathA);
const pb = fs.realpathSync(pathB);
return path.resolve(pa) === path.resolve(pb);
}
it('process a hover request', async () => {
let esClient = mockEsClient();
const revision = 'master';
const lspservice = new LspService('127.0.0.1', serverOptions, esClient, installManager, new ConsoleLoggerFactory());
try {
const params = {
textDocument: {
uri: `git://${repoUri}/blob/${revision}/${filename}`,
},
position: {
line: 0,
character: 1,
}
};
const workspaceHandler = lspservice.workspaceHandler;
const wsSpy = sinon.spy(workspaceHandler, 'handleRequest');
const controller = lspservice.controller;
const ctrlSpy = sinon.spy(controller, 'handleRequest');
let method = 'textDocument/hover';
const response = await lspservice.sendRequest(method, params);
assert.ok(response);
assert.ok(response.result.contents);
wsSpy.restore();
ctrlSpy.restore();
const workspaceFolderExists = fs.existsSync(path.join(serverOptions.workspacePath, repoUri, revision));
// workspace is opened
assert.ok(workspaceFolderExists);
const workspacePath = fs.realpathSync(path.resolve(serverOptions.workspacePath, repoUri, revision));
// workspace handler is working, filled workspacePath
sinon.assert.calledWith(ctrlSpy, sinon.match.has("workspacePath", sinon.match((value) => comparePath(value, workspacePath))));
// uri is changed by workspace handler
sinon.assert.calledWith(ctrlSpy, sinon.match.hasNested("params.textDocument.uri", `file://${workspacePath}/${filename}`));
return;
} finally {
await lspservice.shutdown()
}
// @ts-ignore
}).timeout(10000);
it("unload a workspace", async () => {
let esClient = mockEsClient();
const revision = 'master';
const lspservice = new LspService('127.0.0.1', serverOptions, esClient, installManager, new ConsoleLoggerFactory());
try {
const params = {
textDocument: {
uri: `git://${repoUri}/blob/${revision}/${filename}`,
},
position: {
line: 0,
character: 1,
}
};
let method = 'textDocument/hover';
// send a dummy request to open a workspace;
const response = await lspservice.sendRequest(method, params);
assert.ok(response);
const workspacePath = path.resolve(serverOptions.workspacePath, repoUri, revision);
const workspaceFolderExists = fs.existsSync(workspacePath);
// workspace is opened
assert.ok(workspaceFolderExists);
const controller = lspservice.controller;
// @ts-ignore
const languageServer = controller.languageServerMap['typescript'];
const realWorkspacePath = fs.realpathSync(workspacePath);
// @ts-ignore
const handler = languageServer.languageServerHandlers[realWorkspacePath];
const exitSpy = sinon.spy(handler, 'exit');
const unloadSpy = sinon.spy(handler, 'unloadWorkspace');
await lspservice.deleteWorkspace(repoUri);
unloadSpy.restore();
exitSpy.restore();
sinon.assert.calledWith(unloadSpy, realWorkspacePath);
// typescript language server for this workspace should be closed
sinon.assert.calledOnce(exitSpy);
// the workspace folder should be deleted
const exists = fs.existsSync(realWorkspacePath);
assert.strictEqual(exists, false);
return;
} finally {
await lspservice.shutdown()
}
// @ts-ignore
}).timeout(10000);
});


@ -1,36 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import fs from 'fs';
import Git from 'nodegit';
import { getDefaultBranch } from './git_operations';
function rmDir(dirPath: string) {
const files = fs.readdirSync(dirPath);
if (files.length > 0) {
for (const f of files) {
const filePath = dirPath + '/' + f;
if (fs.statSync(filePath).isFile()) {
fs.unlinkSync(filePath);
} else {
rmDir(filePath);
}
}
}
fs.rmdirSync(dirPath);
}
it('get default branch from a non master repo', async () => {
const path = '/tmp/testtrunk';
if (fs.existsSync(path)) {
rmDir(path);
}
await Git.Clone.clone('https://github.com/spacedragon/testtrunk.git', path);
const defaultBranch = await getDefaultBranch(path);
expect(defaultBranch).toEqual('trunk');
rmDir(path);
return '';
});


@ -1,263 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import fs from 'fs';
import Git from 'nodegit';
import path from 'path';
import rimraf from 'rimraf';
import sinon from 'sinon';
import { AnyObject, EsClient } from '../lib/esqueue';
import { Log } from '../log';
import { InstallManager } from '../lsp/install_manager';
import { LspService } from '../lsp/lsp_service';
import { ServerOptions } from '../server_options';
import { ConsoleLoggerFactory } from '../utils/console_logger_factory';
import { LspIndexer } from './lsp_indexer';
import { RepositoryGitStatusReservedField } from './schema';
jest.setTimeout(30000);
const log: Log = (new ConsoleLoggerFactory().getLogger(['test']) as any) as Log;
const emptyAsyncFunc = async (_: AnyObject): Promise<any> => {
Promise.resolve({});
};
const esClient = {
bulk: emptyAsyncFunc,
get: emptyAsyncFunc,
deleteByQuery: emptyAsyncFunc,
indices: {
existsAlias: emptyAsyncFunc,
create: emptyAsyncFunc,
putAlias: emptyAsyncFunc,
},
};
function prepareProject(url: string, p: string) {
return new Promise(resolve => {
if (!fs.existsSync(p)) {
rimraf(p, error => {
Git.Clone.clone(url, p).then(repo => {
resolve(repo);
});
});
} else {
resolve();
}
});
}
const repoUri = 'github.com/Microsoft/TypeScript-Node-Starter';
const options = {
enabled: true,
queueIndex: '.code-worker-queue',
queueTimeout: 60 * 60 * 1000, // 1 hour by default
updateFreqencyMs: 5 * 60 * 1000, // 5 minutes by default
indexFrequencyMs: 24 * 60 * 60 * 1000, // 1 day by default
lspRequestTimeoutMs: 5 * 60, // timeout a request over 30s
repos: [],
maxWorkspace: 5, // max workspace folder for each language server
isAdmin: true, // If we show the admin buttons
disableScheduler: true, // Temp option to disable all schedulers.
};
const config = {
get(key: string) {
if (key === 'path.data') {
return '/tmp/test';
}
},
};
const serverOptions = new ServerOptions(options, config);
function cleanWorkspace() {
return new Promise(resolve => {
rimraf(serverOptions.workspacePath, resolve);
});
}
function setupEsClientSpy() {
// Mock a git status of the repo indicating the the repo is fully cloned already.
const getSpy = sinon.fake.returns(
Promise.resolve({
_source: {
[RepositoryGitStatusReservedField]: {
uri: 'github.com/Microsoft/TypeScript-Node-Starter',
progress: 100,
timestamp: new Date(),
cloneProgress: {
isCloned: true,
},
},
},
})
);
const existsAliasSpy = sinon.fake.returns(false);
const createSpy = sinon.spy();
const putAliasSpy = sinon.spy();
const deleteByQuerySpy = sinon.spy();
const bulkSpy = sinon.spy();
esClient.bulk = bulkSpy;
esClient.indices.existsAlias = existsAliasSpy;
esClient.indices.create = createSpy;
esClient.indices.putAlias = putAliasSpy;
esClient.get = getSpy;
esClient.deleteByQuery = deleteByQuerySpy;
return {
getSpy,
existsAliasSpy,
createSpy,
putAliasSpy,
deleteByQuerySpy,
bulkSpy,
};
}
function setupLsServiceSendRequestSpy(): sinon.SinonSpy {
return sinon.fake.returns(
Promise.resolve({
result: [
{
// 1 mock symbol for each file
symbols: [
{
symbolInformation: {
name: 'mocksymbolname',
},
},
],
// 1 mock reference for each file
references: [{}],
},
],
})
);
}
beforeAll(async () => {
return new Promise(resolve => {
rimraf(serverOptions.repoPath, resolve);
});
});
beforeEach(async () => {
await prepareProject(
'https://github.com/Microsoft/TypeScript-Node-Starter.git',
path.join(serverOptions.repoPath, repoUri)
);
});
afterAll(() => {
return cleanWorkspace();
});
afterEach(() => {
sinon.restore();
});
test('Normal LSP index process.', async () => {
// Setup the esClient spies
const {
getSpy,
existsAliasSpy,
createSpy,
putAliasSpy,
deleteByQuerySpy,
bulkSpy,
} = setupEsClientSpy();
const lspservice = new LspService(
'127.0.0.1',
serverOptions,
esClient as EsClient,
{} as InstallManager,
new ConsoleLoggerFactory()
);
lspservice.sendRequest = setupLsServiceSendRequestSpy();
const indexer = new LspIndexer(
'github.com/Microsoft/TypeScript-Node-Starter',
'master',
lspservice,
serverOptions,
esClient as EsClient,
log as Log
);
await indexer.start();
// Expect EsClient get called once to get the repo git status.
expect(getSpy.calledOnce).toBeTruthy();
// Expect EsClient deleteByQuery called 3 times for repository cleaning before
// the index for document, symbol and reference, respectively.
expect(deleteByQuerySpy.callCount).toEqual(3);
// Ditto for index and alias creation
expect(existsAliasSpy.callCount).toEqual(3);
expect(createSpy.callCount).toEqual(3);
expect(putAliasSpy.callCount).toEqual(3);
// There are 22 files in the repo. 1 file + 1 symbol + 1 reference = 3 objects to
// index for each file. Total doc indexed should be 3 * 22 = 66, which can be
// fitted into a single batch index.
expect(bulkSpy.calledOnce).toBeTruthy();
expect(bulkSpy.getCall(0).args[0].body.length).toEqual(66 * 2);
});
test('Cancel LSP index process.', async () => {
// Setup the esClient spies
const {
getSpy,
existsAliasSpy,
createSpy,
putAliasSpy,
deleteByQuerySpy,
bulkSpy,
} = setupEsClientSpy();
const lspservice = new LspService(
'127.0.0.1',
serverOptions,
esClient as EsClient,
{} as InstallManager,
new ConsoleLoggerFactory()
);
lspservice.sendRequest = setupLsServiceSendRequestSpy();
const indexer = new LspIndexer(
'github.com/Microsoft/TypeScript-Node-Starter',
'master',
lspservice,
serverOptions,
esClient as EsClient,
log as Log
);
// Cancel the indexer before start.
indexer.cancel();
await indexer.start();
// Expect EsClient get called once to get the repo git status.
expect(getSpy.calledOnce).toBeTruthy();
// Expect EsClient deleteByQuery called 3 times for repository cleaning before
// the index for document, symbol and reference, respectively.
expect(deleteByQuerySpy.callCount).toEqual(3);
// Ditto for index and alias creation
expect(existsAliasSpy.callCount).toEqual(3);
expect(createSpy.callCount).toEqual(3);
expect(putAliasSpy.callCount).toEqual(3);
// Because the indexer is cancelled already in the begining. 0 doc should be
// indexed and thus bulk won't be called.
expect(bulkSpy.notCalled).toBeTruthy();
});


@ -1,207 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/* tslint:disable */
import fs from 'fs';
import Git from 'nodegit';
// import rimraf from 'rimraf';
import sinon from 'sinon';
import path from 'path';
import mkdirp from 'mkdirp';
import { LspService } from "./lsp_service";
import { ServerOptions } from "../server_options";
import { ConsoleLoggerFactory } from "../utils/console_logger_factory";
import { RepositoryGitStatusReservedField, RepositoryTypeName } from '../indexer/schema';
import { InstallManager } from "./install_manager";
import * as os from "os";
jest.setTimeout(60000);
const filename = 'hello.ts';
async function prepareProject(repoPath: string) {
mkdirp.sync(repoPath);
const repo = await Git.Repository.init(repoPath, 0);
const helloContent = "console.log('hello world');";
fs.writeFileSync(path.join(repo.workdir(), filename), helloContent, 'utf8');
const index = await repo.refreshIndex();
await index.addByPath(filename);
index.write();
const treeId = await index.writeTree();
const committer = Git.Signature.create("tester",
"test@test.com", Date.now() / 1000, 60);
const commit = await repo.createCommit("HEAD", committer, committer, "commit for test", treeId, []);
console.log(`created commit ${commit.tostrS()}`);
return repo;
}
const options = {
enabled: true,
queueIndex: '.code-worker-queue',
queueTimeout: 60 * 60 * 1000, // 1 hour by default
updateFreqencyMs: 5 * 60 * 1000, // 5 minutes by default
indexFrequencyMs: 24 * 60 * 60 * 1000, // 1 day by default
lspRequestTimeoutMs: 5 * 60, // timeout a request over 30s
repos: [],
maxWorkspace: 5, // max workspace folder for each language server
isAdmin: true, // If we show the admin buttons
disableScheduler: true, // Temp option to disable all schedulers.
};
const tmpDataPath = fs.mkdtempSync(path.join(os.tmpdir(), 'code_test'));
console.log(`tmp data path is ${tmpDataPath}`);
const config = {
get(key: string) {
if (key === 'path.data') {
return tmpDataPath;
}
}
};
const serverOptions = new ServerOptions(options, config);
const installManager = new InstallManager(serverOptions);
function mockEsClient(): any {
const api = {
get: function (params: any) {
const { type } = params;
if (type === RepositoryTypeName) {
return {
_source: {
[RepositoryGitStatusReservedField]: {
cloneProgress: {
isCloned: true
}
}
}
}
}
},
};
return api;
}
const repoUri = 'github.com/test/test_repo';
beforeAll(async () => {
await prepareProject(
path.join(serverOptions.repoPath, repoUri)
);
});
afterAll(() => {
// return rimraf.sync(tmpDataPath);
});
function comparePath(pathA: string, pathB: string) {
const pa = fs.realpathSync(pathA);
const pb = fs.realpathSync(pathB);
return path.resolve(pa) === path.resolve(pb);
}
test('process a hover request', async () => {
let esClient = mockEsClient();
const revision = 'master';
const lspservice = new LspService('127.0.0.1', serverOptions, esClient, installManager, new ConsoleLoggerFactory());
try {
const params = {
textDocument: {
uri: `git://${repoUri}/blob/${revision}/${filename}`,
},
position: {
line: 0,
character: 1,
}
};
const workspaceHandler = lspservice.workspaceHandler;
const wsSpy = sinon.spy(workspaceHandler, 'handleRequest');
const controller = lspservice.controller;
const ctrlSpy = sinon.spy(controller, 'handleRequest');
let method = 'textDocument/hover';
const response = await lspservice.sendRequest(method, params);
expect(response).toBeTruthy();
expect(response.result.contents).toBeDefined();
wsSpy.restore();
ctrlSpy.restore();
const workspaceFolderExists = fs.existsSync(path.join(serverOptions.workspacePath, repoUri, revision));
// workspace is opened
expect(workspaceFolderExists).toBeTruthy();
const workspacePath = fs.realpathSync(path.resolve(serverOptions.workspacePath, repoUri, revision));
// workspace handler is working, filled workspacePath
sinon.assert.calledWith(ctrlSpy, sinon.match.has("workspacePath", sinon.match((value) => comparePath(value, workspacePath))));
// uri is changed by workspace handler
sinon.assert.calledWith(ctrlSpy, sinon.match.hasNested("params.textDocument.uri", `file://${workspacePath}/${filename}`));
} finally {
await lspservice.shutdown()
}
return undefined;
});
test("unload a workspace", async () => {
let esClient = mockEsClient();
const revision = 'master';
const lspservice = new LspService('127.0.0.1', serverOptions, esClient, installManager, new ConsoleLoggerFactory());
try {
const params = {
textDocument: {
uri: `git://${repoUri}/blob/${revision}/${filename}`,
},
position: {
line: 0,
character: 1,
}
};
let method = 'textDocument/hover';
// send a dummy request to open a workspace;
const response = await lspservice.sendRequest(method, params);
expect(response).toBeTruthy();
const workspacePath = path.resolve(serverOptions.workspacePath, repoUri, revision);
const workspaceFolderExists = fs.existsSync(workspacePath);
// workspace is opened
expect(workspaceFolderExists).toBeTruthy();
const controller = lspservice.controller;
// @ts-ignore
const languageServer = controller.languageServerMap['typescript'];
const realWorkspacePath = fs.realpathSync(workspacePath);
// @ts-ignore
const handler = languageServer.languageServerHandlers[realWorkspacePath];
const exitSpy = sinon.spy(handler, 'exit');
const unloadSpy = sinon.spy(handler, 'unloadWorkspace');
await lspservice.deleteWorkspace(repoUri);
unloadSpy.restore();
exitSpy.restore();
sinon.assert.calledWith(unloadSpy, realWorkspacePath);
// typescript language server for this workspace should be closed
sinon.assert.calledOnce(exitSpy);
// the workspace folder should be deleted
const exists = fs.existsSync(realWorkspacePath);
expect(exists).toBeFalsy();
} finally {
await lspservice.shutdown()
}
return undefined;
});


@ -29,9 +29,10 @@ export default (gulp, { mocha }) => {
gulp.task('testserver', () => {
const globs = [
'common/**/__tests__/**/*.js',
'common/**/__tests__/**/*.ts',
'server/**/__tests__/**/*.js',
'server/**/__tests__/**/*.ts',
].concat(forPluginServerTests());
return gulp.src(globs, { read: false })
.pipe(mocha(MOCHA_OPTIONS));
});
@ -52,4 +53,4 @@ export default (gulp, { mocha }) => {
});
});
});
};
};
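
Adding the .ts globs only tells mocha which files to pick up; the test server still needs a transpile-on-require hook so the TypeScript tests can execute. Purely as a hypothetical sketch of that idea (the real MOCHA_OPTIONS are defined elsewhere in the x-pack gulpfile and may use a different register mechanism, e.g. a babel hook):

// Hypothetical sketch: ts-node/register is an assumption here; any require hook
// that transpiles .ts files on the fly would serve the same purpose.
const MOCHA_OPTIONS = {
  ui: 'bdd',
  require: 'ts-node/register', // compile .ts test files when they are required
  timeout: 10000,
};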

yarn.lock

@ -1101,6 +1101,13 @@
resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.7.0.tgz#9a06f4f137ee84d7df0460c1fdb1135ffa6c50fd"
integrity sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==
"@sinonjs/commons@^1.0.2", "@sinonjs/commons@^1.2.0":
version "1.3.0"
resolved "http://registry.npm.taobao.org/@sinonjs/commons/download/@sinonjs/commons-1.3.0.tgz#50a2754016b6f30a994ceda6d9a0a8c36adda849"
integrity sha1-UKJ1QBa28wqZTO2m2aCow2rdqEk=
dependencies:
type-detect "4.0.8"
"@sinonjs/formatio@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@sinonjs/formatio/-/formatio-2.0.0.tgz#84db7e9eb5531df18a8c5e0bfb6e449e55e654b2"
@ -1108,6 +1115,22 @@
dependencies:
samsam "1.3.0"
"@sinonjs/formatio@^3.1.0":
version "3.1.0"
resolved "http://registry.npm.taobao.org/@sinonjs/formatio/download/@sinonjs/formatio-3.1.0.tgz#6ac9d1eb1821984d84c4996726e45d1646d8cce5"
integrity sha1-asnR6xghmE2ExJlnJuRdFkbYzOU=
dependencies:
"@sinonjs/samsam" "^2 || ^3"
"@sinonjs/samsam@^2 || ^3", "@sinonjs/samsam@^3.0.2":
version "3.0.2"
resolved "http://registry.npm.taobao.org/@sinonjs/samsam/download/@sinonjs/samsam-3.0.2.tgz#304fb33bd5585a0b2df8a4c801fcb47fa84d8e43"
integrity sha1-ME+zO9VYWgst+KTIAfy0f6hNjkM=
dependencies:
"@sinonjs/commons" "^1.0.2"
array-from "^2.1.1"
lodash.get "^4.4.2"
"@slack/client@^4.8.0":
version "4.8.0"
resolved "https://registry.yarnpkg.com/@slack/client/-/client-4.8.0.tgz#265606f1cebae1d72f3fdd2cdf7cf1510783dde4"
@ -2883,6 +2906,11 @@ array-flatten@^2.1.0:
resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.1.tgz#426bb9da84090c1838d812c8150af20a8331e296"
integrity sha1-Qmu52oQJDBg42BLIFQryCoMx4pY=
array-from@^2.1.1:
version "2.1.1"
resolved "http://registry.npm.taobao.org/array-from/download/array-from-2.1.1.tgz#cfe9d8c26628b9dc5aecc62a9f5d8f1f352c1195"
integrity sha1-z+nYwmYoudxa7MYqn12PHzUsEZU=
array-ify@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece"
@ -5041,7 +5069,7 @@ chai-as-promised@^7.0.0:
dependencies:
check-error "^1.0.2"
chai@3.5.0, "chai@>=1.9.2 <4.0.0":
chai@3.5.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/chai/-/chai-3.5.0.tgz#4d02637b067fe958bdbfdd3a40ec56fef7373247"
integrity sha1-TQJjewZ/6Vi9v906QOxW/vc3Mkc=
@ -5050,7 +5078,7 @@ chai@3.5.0, "chai@>=1.9.2 <4.0.0":
deep-eql "^0.1.3"
type-detect "^1.0.0"
chai@^4.0.1, chai@^4.2.0:
chai@^4.0.1, chai@^4.1.2, chai@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/chai/-/chai-4.2.0.tgz#760aa72cf20e3795e84b12877ce0e83737aa29e5"
integrity sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==
@ -6919,6 +6947,13 @@ debug@3.X, debug@^3.1.0, debug@^3.2.5:
dependencies:
ms "^2.1.1"
debug@^4.1.0:
version "4.1.0"
resolved "http://registry.npm.taobao.org/debug/download/debug-4.1.0.tgz#373687bffa678b38b1cd91f861b63850035ddc87"
integrity sha1-NzaHv/pnizixzZH4YbY4UANd3Ic=
dependencies:
ms "^2.1.1"
debuglog@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492"
@ -13253,6 +13288,11 @@ just-extend@^1.1.27:
resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-1.1.27.tgz#ec6e79410ff914e472652abfa0e603c03d60e905"
integrity sha512-mJVp13Ix6gFo3SBAy9U/kL+oeZqzlYYYLQBwXVBlVzIsZwBqGREnOro24oC/8s8aox+rJhtZ2DiQof++IrkA+g==
just-extend@^4.0.2:
version "4.0.2"
resolved "http://registry.npm.taobao.org/just-extend/download/just-extend-4.0.2.tgz#f3f47f7dfca0f989c55410a7ebc8854b07108afc"
integrity sha1-8/R/ffyg+YnFVBCn68iFSwcQivw=
just-reduce-object@^1.0.3:
version "1.1.0"
resolved "https://registry.yarnpkg.com/just-reduce-object/-/just-reduce-object-1.1.0.tgz#d29d172264f8511c74462de30d72d5838b6967e6"
@ -14295,7 +14335,7 @@ lodash@^4.0.0, lodash@^4.17.5, lodash@^4.2.0, lodash@^4.2.1, lodash@~4.17.10:
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.10.tgz#1b7793cf7259ea38fb3661d4d38b3260af8ae4e7"
integrity sha512-UejweD1pDoXu+AD825lWwp4ZGtSwgnpZxb3JDViD7StjQz+Nb/6l093lx4OQ0foGWNRoc19mWy7BzL+UAK2iVg==
lodash@^4.11.1, lodash@^4.17.2, lodash@^4.3.0, lodash@^4.5.0, lodash@^4.6.1, lodash@^4.8.2, lodash@~4.17.5:
lodash@^4.11.1, lodash@^4.17.2, lodash@^4.3.0, lodash@^4.5.0, lodash@^4.6.1, lodash@~4.17.5:
version "4.17.5"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.5.tgz#99a92d65c0272debe8c96b6057bc8fbfa3bed511"
integrity sha512-svL3uiZf1RwhH+cWrfZn3A4+U58wbP0tGVTLQPbjplZxZ8ROD9VLuNgsRniTlLe7OlSqR79RUehXgpBW/s0IQw==
@ -14384,6 +14424,11 @@ lolex@^2.2.0, lolex@^2.3.2:
resolved "https://registry.yarnpkg.com/lolex/-/lolex-2.6.0.tgz#cf9166f3c9dece3cdeb5d6b01fce50f14a1203e3"
integrity sha512-e1UtIo1pbrIqEXib/yMjHciyqkng5lc0rrIbytgjmRgDR9+2ceNIAcwOWSgylRjoEP9VdVguCSRwnNmlbnOUwA==
lolex@^3.0.0:
version "3.0.0"
resolved "http://registry.npm.taobao.org/lolex/download/lolex-3.0.0.tgz#f04ee1a8aa13f60f1abd7b0e8f4213ec72ec193e"
integrity sha1-8E7hqKoT9g8avXsOj0IT7HLsGT4=
long@^2.4.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/long/-/long-2.4.0.tgz#9fa180bb1d9500cdc29c4156766a1995e1f4524f"
@ -15414,6 +15459,17 @@ nise@^1.2.0:
path-to-regexp "^1.7.0"
text-encoding "^0.6.4"
nise@^1.4.7:
version "1.4.8"
resolved "http://registry.npm.taobao.org/nise/download/nise-1.4.8.tgz#ce91c31e86cf9b2c4cac49d7fcd7f56779bfd6b0"
integrity sha1-zpHDHobPmyxMrEnX/Nf1Z3m/1rA=
dependencies:
"@sinonjs/formatio" "^3.1.0"
just-extend "^4.0.2"
lolex "^2.3.2"
path-to-regexp "^1.7.0"
text-encoding "^0.6.4"
no-case@^2.2.0, no-case@^2.3.2:
version "2.3.2"
resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac"
@ -15426,19 +15482,20 @@ no-ui-slider@1.2.0:
resolved "https://registry.yarnpkg.com/no-ui-slider/-/no-ui-slider-1.2.0.tgz#1f64f5a8b82e6786f3261d82b0cc99b598817e69"
integrity sha1-H2T1qLguZ4bzJh2CsMyZtZiBfmk=
nock@8.0.0:
version "8.0.0"
resolved "https://registry.yarnpkg.com/nock/-/nock-8.0.0.tgz#f86d676568c73a3bb2144ebc80791d447bb334d2"
integrity sha1-+G1nZWjHOjuyFE68gHkdRHuzNNI=
nock@10.0.4:
version "10.0.4"
resolved "http://registry.npm.taobao.org/nock/download/nock-10.0.4.tgz#44f5dcfe0a6b09f95d541f6b3f057cfabbbd2a3a"
integrity sha1-RPXc/gprCfldVB9rPwV8+ru9Kjo=
dependencies:
chai ">=1.9.2 <4.0.0"
debug "^2.2.0"
chai "^4.1.2"
debug "^4.1.0"
deep-equal "^1.0.0"
json-stringify-safe "^5.0.1"
lodash "^4.8.2"
lodash "^4.17.5"
mkdirp "^0.5.0"
propagate "0.4.0"
qs "^6.0.2"
propagate "^1.0.0"
qs "^6.5.1"
semver "^5.5.0"
node-ensure@^0.0.0:
version "0.0.0"
@ -15630,9 +15687,9 @@ nodegit-promise@~4.0.0:
dependencies:
asap "~2.0.3"
"nodegit@git+https://github.com/elastic/nodegit.git#6d5c2050426716127f602d2b07d885fef9b8dadb":
version "0.24.0-alpha.1"
resolved "git+https://github.com/elastic/nodegit.git#6d5c2050426716127f602d2b07d885fef9b8dadb"
"nodegit@git+https://github.com/elastic/nodegit.git":
version "0.24.0-alpha.6"
resolved "git+https://github.com/elastic/nodegit.git#54173ad757bc09f155183bb27bce921b02fb8198"
dependencies:
babel-cli "^6.7.7"
babel-preset-es2015 "^6.6.0"
@ -17260,10 +17317,10 @@ prop-types@^15.5.7, prop-types@^15.6.2:
loose-envify "^1.3.1"
object-assign "^4.1.1"
propagate@0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/propagate/-/propagate-0.4.0.tgz#f3fcca0a6fe06736a7ba572966069617c130b481"
integrity sha1-8/zKCm/gZzanulcpZgaWF8EwtIE=
propagate@^1.0.0:
version "1.0.0"
resolved "http://registry.npm.taobao.org/propagate/download/propagate-1.0.0.tgz#00c2daeedda20e87e3782b344adba1cddd6ad709"
integrity sha1-AMLa7t2iDofjeCs0Stuhzd1q1wk=
proper-lockfile@^3.0.2:
version "3.2.0"
@ -17567,7 +17624,7 @@ qs@6.5.1:
resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8"
integrity sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==
qs@6.5.2, qs@^6.0.2, qs@^6.4.0, qs@^6.5.1, qs@~6.5.1, qs@~6.5.2:
qs@6.5.2, qs@^6.4.0, qs@^6.5.1, qs@~6.5.1, qs@~6.5.2:
version "6.5.2"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36"
integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==
@ -19764,6 +19821,19 @@ sinon@^5.0.7:
supports-color "^5.1.0"
type-detect "^4.0.5"
sinon@v7.2.2:
version "7.2.2"
resolved "http://registry.npm.taobao.org/sinon/download/sinon-7.2.2.tgz#388ecabd42fa93c592bfc71d35a70894d5a0ca07"
integrity sha1-OI7KvUL6k8WSv8cdNacIlNWgygc=
dependencies:
"@sinonjs/commons" "^1.2.0"
"@sinonjs/formatio" "^3.1.0"
"@sinonjs/samsam" "^3.0.2"
diff "^3.5.0"
lolex "^3.0.0"
nise "^1.4.7"
supports-color "^5.5.0"
sisteransi@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-0.1.1.tgz#5431447d5f7d1675aac667ccd0b865a4994cb3ce"
@ -21549,16 +21619,16 @@ type-detect@0.1.1:
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-0.1.1.tgz#0ba5ec2a885640e470ea4e8505971900dac58822"
integrity sha1-C6XsKohWQORw6k6FBZcZANrFiCI=
type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8:
version "4.0.8"
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
type-detect@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-1.0.0.tgz#762217cc06db258ec48908a1298e8b95121e8ea2"
integrity sha1-diIXzAbbJY7EiQihKY6LlRIejqI=
type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8:
version "4.0.8"
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
type-is@~1.6.15, type-is@~1.6.16:
version "1.6.16"
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.16.tgz#f89ce341541c672b25ee7ae3c73dee3b2be50194"