[Code] enable ssh protocol, only read ssh key pairs from data folder. (#34412)
parent 6e18d9ca5f
commit 76be6c41c7
13 changed files with 249 additions and 83 deletions
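Taken together, the change threads a credentials directory through RepositoryService and dispatches clone/update on the parsed URL protocol. A minimal usage sketch of the updated API from a caller's point of view (the import paths, directory locations, and sample repository URL are illustrative, not part of the diff; ConsoleLogger is borrowed from the new test file):

// Usage sketch only; the real plugin resolves these paths from ServerOptions under Kibana's path.data.
import { RepositoryUtils } from './common/repository_utils';
import { RepositoryServiceFactory } from './server/repository_service_factory';
import { ConsoleLogger } from './server/utils/console_logger';

const log = new ConsoleLogger();
const factory = new RepositoryServiceFactory();

// newInstance() now takes the credentials directory alongside the repo directory.
const service = factory.newInstance('/data/code/repos', '/data/code/credentials', log);

// buildRepository() records the URL's protocol; 'ssh' routes clone/update through the key
// pairs found in the credentials directory, other protocols keep the previous behaviour.
const repo = RepositoryUtils.buildRepository('git@github.com:elastic/some-repo.git');

service
  .clone(repo)
  .then(result => log.info(`cloned ${result.uri}`))
  .catch(e => log.error(e));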
@@ -163,7 +163,7 @@
     "@elastic/javascript-typescript-langserver": "^0.1.21",
     "@elastic/lsp-extension": "^0.1.1",
     "@elastic/node-crypto": "0.1.2",
-    "@elastic/nodegit": "0.25.0-alpha.11",
+    "@elastic/nodegit": "0.25.0-alpha.12",
     "@elastic/numeral": "2.3.2",
    "@kbn/babel-preset": "1.0.0",
    "@kbn/es-query": "1.0.0",
@@ -15,6 +15,7 @@ test('Repository url parsing', () => {
     url: 'https://github.com/apache/sqoop',
     name: 'sqoop',
     org: 'apache',
+    protocol: 'https',
   });

   // Valid git url with .git suffix.
@@ -23,6 +24,7 @@ test('Repository url parsing', () => {
     uri: 'github.com/apache/sqoop',
     url: 'https://github.com/apache/sqoop.git',
     name: 'sqoop',
+    protocol: 'https',
     org: 'apache',
   });

@@ -39,6 +41,7 @@ test('Repository url parsing', () => {
     url: 'git://a/b',
     name: 'b',
     org: '_',
+    protocol: 'git',
   });

   const repo5 = RepositoryUtils.buildRepository('git://a/b/c');
@@ -47,6 +50,7 @@ test('Repository url parsing', () => {
     url: 'git://a/b/c',
     name: 'c',
     org: 'b',
+    protocol: 'git',
   });

   const repo6 = RepositoryUtils.buildRepository('git@github.com:foo/bar.git');
@@ -54,6 +58,7 @@ test('Repository url parsing', () => {
     uri: 'github.com/foo/bar',
     url: 'git@github.com:foo/bar.git',
     name: 'bar',
+    protocol: 'ssh',
     org: 'foo',
   });

@@ -63,6 +68,7 @@ test('Repository url parsing', () => {
     url: 'ssh://git@github.com:foo/bar.git',
     name: 'bar',
     org: 'foo',
+    protocol: 'ssh',
   });
 });

@@ -73,6 +79,7 @@ test('Repository url parsing with non standard segments', () => {
     url: 'git://a/b/c/d',
     name: 'd',
     org: 'b_c',
+    protocol: 'git',
   });

   const repo2 = RepositoryUtils.buildRepository('git://a/b/c/d/e');
@@ -81,6 +88,7 @@ test('Repository url parsing with non standard segments', () => {
     url: 'git://a/b/c/d/e',
     name: 'e',
     org: 'b_c_d',
+    protocol: 'git',
   });

   const repo3 = RepositoryUtils.buildRepository('git://a');
@@ -88,6 +96,7 @@ test('Repository url parsing with non standard segments', () => {
     uri: 'a/_/_',
     url: 'git://a',
     name: '_',
+    protocol: 'git',
     org: '_',
   });
 });
@@ -99,6 +108,7 @@ test('Repository url parsing with port', () => {
     url: 'ssh://mine@mydomain.com:27017/gitolite-admin',
     name: 'gitolite-admin',
     org: 'mine',
+    protocol: 'ssh',
   });

   const repo2 = RepositoryUtils.buildRepository(
@@ -108,6 +118,7 @@ test('Repository url parsing with port', () => {
     uri: 'mydomain.com:27017/elastic/gitolite-admin',
     url: 'ssh://mine@mydomain.com:27017/elastic/gitolite-admin',
     name: 'gitolite-admin',
+    protocol: 'ssh',
     org: 'elastic',
   });
 });
@@ -28,6 +28,7 @@ export class RepositoryUtils {
       url: repo.href as string,
       name,
       org,
+      protocol: repo.protocol,
     };
   }

@@ -17,6 +17,7 @@ export interface Repository {
   org?: string;
   defaultBranch?: string;
   revision?: string;
+  protocol?: string;
   // The timestamp of next update for this repository.
   nextUpdateTimestamp?: Date;
   // The timestamp of next index for this repository.
x-pack/plugins/code/server/__tests__/repository_service.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import assert from 'assert';
+// import { generateKeyPairSync } from 'crypto';
+import fs from 'fs';
+import * as os from 'os';
+import path from 'path';
+import rimraf from 'rimraf';
+import { RepositoryUtils } from '../../common/repository_utils';
+import { RepositoryService } from '../repository_service';
+import { ConsoleLogger } from '../utils/console_logger';
+
+describe('repository service test', () => {
+  const log = new ConsoleLogger();
+  const baseDir = fs.mkdtempSync(path.join(os.tmpdir(), 'code_test'));
+  log.debug(baseDir);
+  const repoDir = path.join(baseDir, 'repo');
+  const credsDir = path.join(baseDir, 'credentials');
+  // @ts-ignore
+  before(() => {
+    fs.mkdirSync(credsDir);
+    fs.mkdirSync(repoDir);
+  });
+  // @ts-ignore
+  after(() => {
+    return rimraf.sync(baseDir);
+  });
+  const service = new RepositoryService(repoDir, credsDir, log);
+
+  it('can not clone a repo by ssh without a key', async () => {
+    const repo = RepositoryUtils.buildRepository(
+      'git@github.com:elastic/TypeScript-Node-Starter.git'
+    );
+    await assert.rejects(service.clone(repo));
+    // @ts-ignore
+  }).timeout(60000);
+
+  /*it('can clone a repo by ssh with a key', async () => {
+
+    const repo = RepositoryUtils.buildRepository('git@github.com:elastic/code.git');
+    const { publicKey, privateKey } = generateKeyPairSync('rsa', {
+      modulusLength: 4096,
+      publicKeyEncoding: {
+        type: 'pkcs1',
+        format: 'pem',
+      },
+      privateKeyEncoding: {
+        type: 'pkcs1',
+        format: 'pem',
+      },
+    });
+    fs.writeFileSync(path.join(credsDir, 'id_rsa.pub'), publicKey);
+    fs.writeFileSync(path.join(credsDir, 'id_rsa'), privateKey);
+    const result = await service.clone(repo);
+    assert.ok(fs.existsSync(path.join(repoDir, result.repo.uri)));
+  }).timeout(60000); */
+});
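The commented-out second test above already sketches the setup for a successful SSH clone; pulled out on its own, the key-provisioning step looks roughly like this (the directory path and the id_rsa file names follow the convention the commented test uses; nothing in the plugin creates these files for you):

// Sketch, mirroring the commented-out test: generate an RSA key pair and place both
// halves in the credentials folder so RepositoryService can pick them up.
import { generateKeyPairSync } from 'crypto';
import fs from 'fs';
import path from 'path';

const credsDir = '/data/code/credentials'; // placeholder for ServerOptions.credsPath

const { publicKey, privateKey } = generateKeyPairSync('rsa', {
  modulusLength: 4096,
  publicKeyEncoding: { type: 'pkcs1', format: 'pem' },
  privateKeyEncoding: { type: 'pkcs1', format: 'pem' },
});

// The service pairs '<name>.pub' with '<name>', so both files must share a base name.
fs.writeFileSync(path.join(credsDir, 'id_rsa.pub'), publicKey);
fs.writeFileSync(path.join(credsDir, 'id_rsa'), privateKey);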
@@ -54,7 +54,11 @@ export class CloneWorker extends AbstractGitWorker {
     }

     this.log.info(`Execute clone job for ${url}`);
-    const repoService = this.repoServiceFactory.newInstance(this.serverOptions.repoPath, this.log);
+    const repoService = this.repoServiceFactory.newInstance(
+      this.serverOptions.repoPath,
+      this.serverOptions.credsPath,
+      this.log
+    );
     const repo = RepositoryUtils.buildRepository(url);
     return await repoService.clone(repo, (progress: number, cloneProgress?: CloneProgress) => {
       this.updateProgress(repo.uri, progress, cloneProgress);
@@ -44,7 +44,11 @@ export class DeleteWorker extends AbstractWorker {
     this.cancellationService.cancelIndexJob(uri);

     // 2. Delete repository on local fs.
-    const repoService = this.repoServiceFactory.newInstance(this.serverOptions.repoPath, this.log);
+    const repoService = this.repoServiceFactory.newInstance(
+      this.serverOptions.repoPath,
+      this.serverOptions.credsPath,
+      this.log
+    );
     const deleteRepoPromise = this.deletePromiseWrapper(repoService.remove(uri), 'git data', uri);

     // 3. Delete ES indices and aliases
@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { CloneWorkerResult } from '../../model';
+import { CloneWorkerResult, Repository } from '../../model';
 import { EsClient, Esqueue } from '../lib/esqueue';
 import { Logger } from '../log';
 import { RepositoryServiceFactory } from '../repository_service_factory';
@@ -26,10 +26,14 @@ export class UpdateWorker extends AbstractGitWorker {
   }

   public async executeJob(job: Job) {
-    const { uri } = job.payload;
-    this.log.info(`Execute update job for ${uri}`);
-    const repoService = this.repoServiceFactory.newInstance(this.serverOptions.repoPath, this.log);
-    return await repoService.update(uri);
+    const repo: Repository = job.payload;
+    this.log.info(`Execute update job for ${repo.uri}`);
+    const repoService = this.repoServiceFactory.newInstance(
+      this.serverOptions.repoPath,
+      this.serverOptions.credsPath,
+      this.log
+    );
+    return await repoService.update(repo);
   }

   public async onJobCompleted(job: Job, res: CloneWorkerResult) {
@@ -21,9 +21,15 @@ import { Logger } from './log';

 export type CloneProgressHandler = (progress: number, cloneProgress?: CloneProgress) => void;

+const SSH_AUTH_ERROR = new Error('Failed to authenticate SSH session');
+
 // This is the service for any kind of repository handling, e.g. clone, update, delete, etc.
 export class RepositoryService {
-  constructor(private readonly repoVolPath: string, private log: Logger) {}
+  constructor(
+    private readonly repoVolPath: string,
+    private readonly credsPath: string,
+    private log: Logger
+  ) {}

   public async clone(repo: Repository, handler?: CloneProgressHandler): Promise<CloneWorkerResult> {
     if (!repo) {
@@ -34,7 +40,7 @@ export class RepositoryService {
       this.log.info(`Repository exist in local path. Do update instead of clone.`);
       try {
         // Do update instead of clone if the local repo exists.
-        const updateRes = await this.update(repo.uri);
+        const updateRes = await this.update(repo);
         return {
           uri: repo.uri,
           repo: {
@@ -61,65 +67,11 @@ export class RepositoryService {
           })
         );
       }

-      // Go head with the actual clone.
-      try {
-        const gitRepo = await Git.Clone.clone(repo.url, localPath, {
-          bare: 1,
-          fetchOpts: {
-            callbacks: {
-              transferProgress: {
-                // Make the progress update less frequent to avoid too many
-                // concurrently update of git status in elasticsearch.
-                throttle: 1000,
-                callback: (stats: any) => {
-                  const progress =
-                    (100 * (stats.receivedObjects() + stats.indexedObjects())) /
-                    (stats.totalObjects() * 2);
-                  const cloneProgress = {
-                    isCloned: false,
-                    receivedObjects: stats.receivedObjects(),
-                    indexedObjects: stats.indexedObjects(),
-                    totalObjects: stats.totalObjects(),
-                    localObjects: stats.localObjects(),
-                    totalDeltas: stats.totalDeltas(),
-                    indexedDeltas: stats.indexedDeltas(),
-                    receivedBytes: stats.receivedBytes(),
-                  };
-                  if (handler) {
-                    handler(progress, cloneProgress);
-                  }
-                },
-              } as any,
-              certificateCheck: () => {
-                // Ignore cert check failures.
-                return 1;
-              },
-            },
-          },
-        });
-        const headCommit = await gitRepo.getHeadCommit();
-        const headRevision = headCommit.sha();
-        const currentBranch = await gitRepo.getCurrentBranch();
-        const currentBranchName = currentBranch.shorthand();
-        this.log.info(
-          `Clone repository from ${
-            repo.url
-          } done with head revision ${headRevision} and default branch ${currentBranchName}`
-        );
-        return {
-          uri: repo.uri,
-          repo: {
-            ...repo,
-            defaultBranch: currentBranchName,
-            revision: headRevision,
-          },
-        };
-      } catch (error) {
-        const msg = `Clone repository from ${repo.url} error.`;
-        this.log.error(msg);
-        this.log.error(error);
-        throw new Error(msg);
+      if (repo.protocol === 'ssh') {
+        return this.tryWithKeys(key => this.doClone(repo, localPath, handler, key));
+      } else {
+        return await this.doClone(repo, localPath, handler);
       }
     }
   }
@@ -139,12 +91,22 @@ export class RepositoryService {
       throw error;
     }
   }
-
-  public async update(uri: string): Promise<UpdateWorkerResult> {
+  public async update(repo: Repository): Promise<UpdateWorkerResult> {
+    if (repo.protocol === 'ssh') {
+      return await this.tryWithKeys(key => this.doUpdate(repo.uri, key));
+    } else {
+      return await this.doUpdate(repo.uri);
+    }
+  }
+  public async doUpdate(uri: string, key?: string): Promise<UpdateWorkerResult> {
     const localPath = RepositoryUtils.repositoryLocalPath(this.repoVolPath, uri);
     try {
       const repo = await Git.Repository.open(localPath);
-      await repo.fetchAll();
+      await repo.fetchAll({
+        callbacks: {
+          credentials: this.credentialFunc(key),
+        },
+      });
       // TODO(mengwei): deal with the case when the default branch has changed.
       const currentBranch = await repo.getCurrentBranch();
       const currentBranchName = currentBranch.shorthand();
@@ -162,9 +124,127 @@ export class RepositoryService {
         revision: headCommit.sha(),
       };
     } catch (error) {
-      const msg = `update repository ${uri} error: ${error}`;
-      this.log.error(msg);
-      throw new Error(msg);
+      if (error.message && error.message.startsWith(SSH_AUTH_ERROR.message)) {
+        throw SSH_AUTH_ERROR;
+      } else {
+        const msg = `update repository ${uri} error: ${error}`;
+        this.log.error(msg);
+        throw new Error(msg);
+      }
     }
   }
+
+  /**
+   * read credentials dir, try using each privateKey until action is successful
+   * @param action
+   */
+  private async tryWithKeys<R>(action: (key: string) => Promise<R>): Promise<R> {
+    const files = fs.existsSync(this.credsPath)
+      ? new Set(fs.readdirSync(this.credsPath))
+      : new Set();
+    for (const f of files) {
+      if (f.endsWith('.pub')) {
+        const privateKey = f.slice(0, f.length - 4);
+        if (files.has(privateKey)) {
+          try {
+            this.log.debug(`try with key ${privateKey}`);
+            return await action(privateKey);
+          } catch (e) {
+            if (e !== SSH_AUTH_ERROR) {
+              throw e;
+            }
+            // continue to try another key
+          }
+        }
+      }
+    }
+    throw SSH_AUTH_ERROR;
+  }
+
+  private async doClone(
+    repo: Repository,
+    localPath: string,
+    handler?: CloneProgressHandler,
+    keyFile?: string
+  ) {
+    try {
+      const gitRepo = await Git.Clone.clone(repo.url, localPath, {
+        bare: 1,
+        fetchOpts: {
+          callbacks: {
+            transferProgress: {
+              // Make the progress update less frequent to avoid too many
+              // concurrently update of git status in elasticsearch.
+              throttle: 1000,
+              callback: (stats: any) => {
+                if (handler) {
+                  const progress =
+                    (100 * (stats.receivedObjects() + stats.indexedObjects())) /
+                    (stats.totalObjects() * 2);
+                  const cloneProgress = {
+                    isCloned: false,
+                    receivedObjects: stats.receivedObjects(),
+                    indexedObjects: stats.indexedObjects(),
+                    totalObjects: stats.totalObjects(),
+                    localObjects: stats.localObjects(),
+                    totalDeltas: stats.totalDeltas(),
+                    indexedDeltas: stats.indexedDeltas(),
+                    receivedBytes: stats.receivedBytes(),
+                  };
+                  handler(progress, cloneProgress);
+                }
+              },
+            } as any,
+            certificateCheck: () => {
+              // Ignore cert check failures.
+              return 1;
+            },
+            credentials: this.credentialFunc(keyFile),
+          },
+        },
+      });
+      const headCommit = await gitRepo.getHeadCommit();
+      const headRevision = headCommit.sha();
+      const currentBranch = await gitRepo.getCurrentBranch();
+      const currentBranchName = currentBranch.shorthand();
+      this.log.info(
+        `Clone repository from ${
+          repo.url
+        } done with head revision ${headRevision} and default branch ${currentBranchName}`
+      );
+      return {
+        uri: repo.uri,
+        repo: {
+          ...repo,
+          defaultBranch: currentBranchName,
+          revision: headRevision,
+        },
+      };
+    } catch (error) {
+      if (error.message && error.message.startsWith(SSH_AUTH_ERROR.message)) {
+        throw SSH_AUTH_ERROR;
+      } else {
+        const msg = `Clone repository from ${repo.url} error.`;
+        this.log.error(msg);
+        this.log.error(error);
+        throw new Error(msg);
+      }
+    }
+  }
+
+  private credentialFunc(keyFile: string | undefined) {
+    return (url: string, userName: string) => {
+      if (keyFile) {
+        this.log.debug(`try with key ${path.join(this.credsPath, keyFile)}`);
+        return Git.Cred.sshKeyNew(
+          userName,
+          path.join(this.credsPath, `${keyFile}.pub`),
+          path.join(this.credsPath, keyFile),
+          ''
+        );
+      } else {
+        return Git.Cred.defaultNew();
+      }
+    };
+  }
 }
@@ -8,7 +8,7 @@ import { Logger } from './log';
 import { RepositoryService } from './repository_service';

 export class RepositoryServiceFactory {
-  public newInstance(repoPath: string, log: Logger): RepositoryService {
-    return new RepositoryService(repoPath, log);
+  public newInstance(repoPath: string, credsPath: string, log: Logger): RepositoryService {
+    return new RepositoryService(repoPath, credsPath, log);
   }
 }
@@ -50,9 +50,7 @@ export class UpdateScheduler extends AbstractScheduler {
         cloneStatus.cloneProgress.isCloned &&
         cloneStatus.progress === WorkerReservedProgress.COMPLETED
       ) {
-        const payload = {
-          uri: repo.uri,
-        };
+        const payload = repo;

         // Update the next repo update timestamp.
         const nextRepoUpdateTimestamp = this.repoNextSchedulingTime();
@@ -26,6 +26,8 @@ export class ServerOptions {

   public readonly repoPath = resolve(this.config.get('path.data'), 'code/repos');
+
+  public readonly credsPath = resolve(this.config.get('path.data'), 'code/credentials');

   public readonly langServerPath = resolve(this.config.get('path.data'), 'code/langserver');

   public readonly jdtWorkspacePath = resolve(this.config.get('path.data'), 'code/jdt_ws');
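credsPath therefore resolves to code/credentials under Kibana's path.data, and the service only reads keys from that folder. A small standalone sketch of the pairing rule tryWithKeys applies when it scans the directory (the helper name and example path are mine, not from the diff):

// listKeyPairs is a hypothetical helper showing the same rule as tryWithKeys:
// every '<name>.pub' must sit next to its private counterpart '<name>'.
import fs from 'fs';

function listKeyPairs(credsPath: string): string[] {
  if (!fs.existsSync(credsPath)) {
    return []; // no credentials folder means no SSH keys to try
  }
  const files = new Set(fs.readdirSync(credsPath));
  const pairs: string[] = [];
  for (const f of files) {
    if (f.endsWith('.pub') && files.has(f.slice(0, -4))) {
      pairs.push(f.slice(0, -4)); // the private key file name, e.g. 'id_rsa'
    }
  }
  return pairs;
}

// e.g. listKeyPairs('/data/code/credentials') might return ['id_rsa']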
@@ -921,10 +921,10 @@
   resolved "https://registry.yarnpkg.com/@elastic/node-crypto/-/node-crypto-0.1.2.tgz#c18ac282f635e88f041cc1555d806e492ca8f3b1"
   integrity sha1-wYrCgvY16I8EHMFVXYBuSSyo87E=

-"@elastic/nodegit@0.25.0-alpha.11":
-  version "0.25.0-alpha.11"
-  resolved "https://registry.yarnpkg.com/@elastic/nodegit/-/nodegit-0.25.0-alpha.11.tgz#f19acf455eb8bc129135e9f45cd17c4caa721c13"
-  integrity sha512-r8DqibYZyOLU9e37B5AGkHpUJWoXRToFA1kjrPDCjFSI/gl736mFr8jm8xIrp32yNnSewyW3avSRULQ3RgfubA==
+"@elastic/nodegit@0.25.0-alpha.12":
+  version "0.25.0-alpha.12"
+  resolved "https://registry.yarnpkg.com/@elastic/nodegit/-/nodegit-0.25.0-alpha.12.tgz#6dffdbea640f8b297af75e96f84c802427dff7f7"
+  integrity sha512-wKTji45igEw3VP2DmgLXpDX3n6WwOy0y4g/Xs385pymn9HWPVyg/UdWLJyXLrl0V//5EDSeqehMqOwTqAQ+qyA==
   dependencies:
     fs-extra "^7.0.0"
     json5 "^2.1.0"