Introduce packaging tests for Docker (#46599)
Closes #37617. Add packaging tests for our Docker images, similar to what we have for the RPM and Debian packages. This works by running a container and probing it, e.g. via `docker exec`. The tests can also be run in Vagrant, by exporting the Docker images to disk and loading them again inside the VMs. Docker is installed via the `Vagrantfile` in a selection of boxes.
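To make the mechanism concrete, the whole test cycle boils down to a handful of Docker commands. A minimal sketch, assuming an image already exported to `elasticsearch_test.docker.tar` and tagged `elasticsearch:test` (the real file name also carries the version):

    # Load a previously exported image, e.g. inside a Vagrant VM
    docker load -i elasticsearch_test.docker.tar

    # Start a disposable container and wait for the node to come up
    container_id=$(docker run --rm --detach \
        --env discovery.type=single-node \
        --publish 9200:9200 \
        elasticsearch:test)

    # Probe the installation from the outside and the inside
    curl -s localhost:9200/_nodes
    docker exec --user elasticsearch:root "$container_id" ls /usr/share/elasticsearch

    # Tear down
    docker rm -f "$container_id"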
This commit is contained in:
parent 3d4a7d0c6c
commit da59dfe09d
16 changed files with 817 additions and 30 deletions
Vagrantfile (vendored): 105 changes
@@ -1,5 +1,5 @@
 # -*- mode: ruby -*-
-# vi: set ft=ruby :
+# vim: ft=ruby ts=2 sw=2 sts=2 et:

 # This Vagrantfile exists to test packaging. Read more about its use in the
 # vagrant section in TESTING.asciidoc.
@@ -63,6 +63,7 @@ Vagrant.configure(2) do |config|
       # Install Jayatana so we can work around it being present.
       [ -f /usr/share/java/jayatanaag.jar ] || install jayatana
     SHELL
+    ubuntu_docker config
   end
 end
 'ubuntu-1804'.tap do |box|
@@ -72,6 +73,7 @@ Vagrant.configure(2) do |config|
       # Install Jayatana so we can work around it being present.
       [ -f /usr/share/java/jayatanaag.jar ] || install jayatana
     SHELL
+    ubuntu_docker config
   end
 end
 'debian-8'.tap do |box|
@@ -87,6 +89,7 @@ Vagrant.configure(2) do |config|
   config.vm.define box, define_opts do |config|
     config.vm.box = 'elastic/debian-9-x86_64'
     deb_common config, box
+    deb_docker config
   end
 end
 'centos-6'.tap do |box|
@@ -99,6 +102,7 @@ Vagrant.configure(2) do |config|
   config.vm.define box, define_opts do |config|
     config.vm.box = 'elastic/centos-7-x86_64'
     rpm_common config, box
+    rpm_docker config
   end
 end
 'oel-6'.tap do |box|
@@ -117,12 +121,14 @@ Vagrant.configure(2) do |config|
   config.vm.define box, define_opts do |config|
     config.vm.box = 'elastic/fedora-28-x86_64'
     dnf_common config, box
+    dnf_docker config
   end
 end
 'fedora-29'.tap do |box|
   config.vm.define box, define_opts do |config|
     config.vm.box = 'elastic/fedora-28-x86_64'
     dnf_common config, box
+    dnf_docker config
   end
 end
 'opensuse-42'.tap do |box|
@@ -185,6 +191,63 @@ def deb_common(config, name, extra: '')
   )
 end

+def ubuntu_docker(config)
+  config.vm.provision 'install Docker using apt', type: 'shell', inline: <<-SHELL
+    # Install packages to allow apt to use a repository over HTTPS
+    apt-get install -y \
+      apt-transport-https \
+      ca-certificates \
+      curl \
+      gnupg2 \
+      software-properties-common
+
+    # Add Docker's official GPG key
+    curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
+
+    # Set up the stable Docker repository
+    add-apt-repository \
+      "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+      $(lsb_release -cs) \
+      stable"
+
+    # Install Docker. Unlike Fedora and CentOS, this also starts the daemon.
+    apt-get update
+    apt-get install -y docker-ce docker-ce-cli containerd.io
+
+    # Add vagrant to the Docker group, so that it can run commands
+    usermod -aG docker vagrant
+  SHELL
+end
+
+def deb_docker(config)
+  config.vm.provision 'install Docker using apt', type: 'shell', inline: <<-SHELL
+    # Install packages to allow apt to use a repository over HTTPS
+    apt-get install -y \
+      apt-transport-https \
+      ca-certificates \
+      curl \
+      gnupg2 \
+      software-properties-common
+
+    # Add Docker's official GPG key
+    curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add -
+
+    # Set up the stable Docker repository
+    add-apt-repository \
+      "deb [arch=amd64] https://download.docker.com/linux/debian \
+      $(lsb_release -cs) \
+      stable"
+
+    # Install Docker. Unlike Fedora and CentOS, this also starts the daemon.
+    apt-get update
+    apt-get install -y docker-ce docker-ce-cli containerd.io
+
+    # Add vagrant to the Docker group, so that it can run commands
+    usermod -aG docker vagrant
+  SHELL
+end
+
 def rpm_common(config, name)
   linux_common(
     config,
@@ -195,6 +258,25 @@ def rpm_common(config, name)
   )
 end

+def rpm_docker(config)
+  config.vm.provision 'install Docker using yum', type: 'shell', inline: <<-SHELL
+    # Install prerequisites
+    yum install -y yum-utils device-mapper-persistent-data lvm2
+
+    # Add repository
+    yum-config-manager -y --add-repo https://download.docker.com/linux/centos/docker-ce.repo
+
+    # Install Docker
+    yum install -y docker-ce docker-ce-cli containerd.io
+
+    # Start Docker
+    systemctl enable --now docker
+
+    # Add vagrant to the Docker group, so that it can run commands
+    usermod -aG docker vagrant
+  SHELL
+end
+
 def dnf_common(config, name)
   # Autodetect doesn't work....
   if Vagrant.has_plugin?('vagrant-cachier')
@@ -211,6 +293,25 @@ def dnf_common(config, name)
   )
 end

+def dnf_docker(config)
+  config.vm.provision 'install Docker using dnf', type: 'shell', inline: <<-SHELL
+    # Install prerequisites
+    dnf -y install dnf-plugins-core
+
+    # Add repository
+    dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo
+
+    # Install Docker
+    dnf install -y docker-ce docker-ce-cli containerd.io
+
+    # Start Docker
+    systemctl enable --now docker
+
+    # Add vagrant to the Docker group, so that it can run commands
+    usermod -aG docker vagrant
+  SHELL
+end
+
 def suse_common(config, name, extra: '')
   linux_common(
     config,
@@ -268,7 +369,7 @@ def linux_common(config,

   # This prevents leftovers from previous tests using the
   # same VM from messing up the current test
-  config.vm.provision 'clean es installs in tmp', run: 'always', type: 'shell', inline: <<-SHELL
+  config.vm.provision 'clean es installs in tmp', type: 'shell', inline: <<-SHELL
     rm -rf /tmp/elasticsearch*
   SHELL
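All four `*_docker` provisioners follow the same shape: add Docker's upstream package repository, install `docker-ce`, make sure the daemon is running, and put the `vagrant` user in the `docker` group. A quick smoke test of a provisioned box might look like this (a sketch; the group change only takes effect on a fresh login):

    vagrant up ubuntu-1804 --provider virtualbox
    vagrant ssh ubuntu-1804 -c 'docker info --format "{{.ServerVersion}}"'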
@@ -319,10 +319,14 @@ public class DistroTestPlugin implements Plugin<Project> {
         List<ElasticsearchDistribution> currentDistros = new ArrayList<>();
         List<ElasticsearchDistribution> upgradeDistros = new ArrayList<>();

-        for (Type type : Arrays.asList(Type.DEB, Type.RPM)) {
+        for (Type type : Arrays.asList(Type.DEB, Type.RPM, Type.DOCKER)) {
             for (Flavor flavor : Flavor.values()) {
                 for (boolean bundledJdk : Arrays.asList(true, false)) {
-                    addDistro(distributions, type, null, flavor, bundledJdk, VersionProperties.getElasticsearch(), currentDistros);
+                    // We should never add a Docker distro with bundledJdk == false
+                    boolean skip = type == Type.DOCKER && bundledJdk == false;
+                    if (skip == false) {
+                        addDistro(distributions, type, null, flavor, bundledJdk, VersionProperties.getElasticsearch(), currentDistros);
+                    }
                 }
             }
         // upgrade version is always bundled jdk
@@ -386,6 +390,11 @@ public class DistroTestPlugin implements Plugin<Project> {
     }

     private static String destructiveDistroTestTaskName(ElasticsearchDistribution distro) {
-        return "destructiveDistroTest." + distroId(distro.getType(), distro.getPlatform(), distro.getFlavor(), distro.getBundledJdk());
+        Type type = distro.getType();
+        return "destructiveDistroTest." + distroId(
+            type,
+            distro.getPlatform(),
+            distro.getFlavor(),
+            distro.getBundledJdk());
     }
 }
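The distro matrix above now yields one destructive test task per Docker flavor (bundled-JDK only). The exact task names come from `distroId`, so rather than guessing them it is easiest to list them (a sketch):

    ./gradlew tasks --all | grep destructiveDistroTest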
@@ -93,8 +93,8 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
             // for the distribution as a file, just depend on the artifact directly
             dependencies.add(distribution.configuration.getName(), dependencyNotation(project, distribution));

-            // no extraction allowed for rpm or deb
-            if (distribution.getType() != Type.RPM && distribution.getType() != Type.DEB) {
+            // no extraction allowed for rpm, deb or docker
+            if (distribution.getType().shouldExtract()) {
                 // for the distribution extracted, add a root level task that does the extraction, and depend on that
                 // extracted configuration as an artifact consisting of the extracted distribution directory
                 dependencies.add(distribution.getExtracted().configuration.getName(),
@@ -221,7 +221,6 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
     }

     private static Dependency projectDependency(Project project, String projectPath, String projectConfig) {
-
         if (project.findProject(projectPath) == null) {
             throw new GradleException("no project [" + projectPath + "], project names: " + project.getRootProject().getAllprojects());
         }
@@ -233,11 +232,20 @@ public class DistributionDownloadPlugin implements Plugin<Project> {

     private static String distributionProjectPath(ElasticsearchDistribution distribution) {
         String projectPath = ":distribution";
-        if (distribution.getType() == Type.INTEG_TEST_ZIP) {
-            projectPath += ":archives:integ-test-zip";
-        } else {
-            projectPath += distribution.getType() == Type.ARCHIVE ? ":archives:" : ":packages:";
-            projectPath += distributionProjectName(distribution);
+        switch (distribution.getType()) {
+            case INTEG_TEST_ZIP:
+                projectPath += ":archives:integ-test-zip";
+                break;
+
+            case DOCKER:
+                projectPath += ":docker:";
+                projectPath += distributionProjectName(distribution);
+                break;
+
+            default:
+                projectPath += distribution.getType() == Type.ARCHIVE ? ":archives:" : ":packages:";
+                projectPath += distributionProjectName(distribution);
+                break;
         }
         return projectPath;
     }
@@ -250,9 +258,12 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
         if (distribution.getBundledJdk() == false) {
             projectName += "no-jdk-";
         }
+
         if (distribution.getType() == Type.ARCHIVE) {
             Platform platform = distribution.getPlatform();
             projectName += platform.toString() + (platform == Platform.WINDOWS ? "-zip" : "-tar");
+        } else if (distribution.getType() == Type.DOCKER) {
+            projectName += "docker-export";
         } else {
             projectName += distribution.getType();
         }
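With the new `DOCKER` branch, a Docker distribution resolves to the `:distribution:docker:docker-export` or `:distribution:docker:oss-docker-export` project (registered in settings.gradle below), so the test artifact can be built like any other subproject (a sketch):

    ./gradlew :distribution:docker:docker-export:assemble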
@@ -46,12 +46,25 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
         INTEG_TEST_ZIP,
         ARCHIVE,
         RPM,
-        DEB;
+        DEB,
+        DOCKER;

         @Override
         public String toString() {
             return super.toString().toLowerCase(Locale.ROOT);
         }
+
+        public boolean shouldExtract() {
+            switch (this) {
+                case DEB:
+                case DOCKER:
+                case RPM:
+                    return false;
+
+                default:
+                    return true;
+            }
+        }
     }

     public enum Flavor {
@@ -171,11 +184,16 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
     }

     public Extracted getExtracted() {
-        if (getType() == Type.RPM || getType() == Type.DEB) {
-            throw new UnsupportedOperationException("distribution type [" + getType() + "] for " +
-                "elasticsearch distribution [" + name + "] cannot be extracted");
+        switch (getType()) {
+            case DEB:
+            case DOCKER:
+            case RPM:
+                throw new UnsupportedOperationException("distribution type [" + getType() + "] for " +
+                    "elasticsearch distribution [" + name + "] cannot be extracted");
+
+            default:
+                return extracted;
         }
-        return extracted;
     }

     @Override
@@ -217,7 +235,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
         if (platform.isPresent() == false) {
             platform.set(CURRENT_PLATFORM);
         }
-        } else { // rpm or deb
+        } else { // rpm, deb or docker
         if (platform.isPresent()) {
             throw new IllegalArgumentException("platform not allowed for elasticsearch distribution ["
                 + name + "] of type [" + getType() + "]");
@@ -186,3 +186,37 @@ assemble.dependsOn "buildDockerImage"
 if (tasks.findByName("composePull")) {
   tasks.composePull.enabled = false
 }
+
+/*
+ * The export subprojects write out the generated Docker images to disk, so
+ * that they can be easily reloaded, for example into a VM.
+ */
+subprojects { Project subProject ->
+  if (subProject.name.contains('docker-export')) {
+    apply plugin: 'distribution'
+
+    final boolean oss = subProject.name.startsWith('oss')
+
+    def exportTaskName = taskName("export", oss, "DockerImage")
+    def buildTaskName = taskName("build", oss, "DockerImage")
+    def tarFile = "${parent.projectDir}/build/elasticsearch${oss ? '-oss' : ''}_test.${VersionProperties.elasticsearch}.docker.tar"
+
+    final Task exportDockerImageTask = task(exportTaskName, type: LoggedExec) {
+      executable 'docker'
+      args "save",
+        "-o",
+        tarFile,
+        "elasticsearch${oss ? '-oss' : ''}:test"
+    }
+
+    exportDockerImageTask.dependsOn(parent.tasks.getByName(buildTaskName))
+
+    artifacts.add('default', file(tarFile)) {
+      type 'tar'
+      name "elasticsearch${oss ? '-oss' : ''}"
+      builtBy exportTaskName
+    }
+
+    assemble.dependsOn exportTaskName
+  }
+}
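Underneath the `LoggedExec` task this is just `docker save`, and the round trip back is `docker load`. A sketch with a hypothetical version number:

    # What the export task runs, essentially
    docker save -o build/elasticsearch_test.7.5.0.docker.tar elasticsearch:test

    # Later, e.g. inside a Vagrant VM
    docker load -i elasticsearch_test.7.5.0.docker.tar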
distribution/docker/docker-export/build.gradle (new file): 2 changes
@@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

distribution/docker/oss-docker-export/build.gradle (new file): 2 changes
@@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.
@@ -0,0 +1,225 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.packaging.test;

import org.apache.http.client.fluent.Request;
import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.Docker.DockerShell;
import org.elasticsearch.packaging.util.Installation;
import org.elasticsearch.packaging.util.ServerUtils;
import org.elasticsearch.packaging.util.Shell.Result;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;

import static java.nio.file.attribute.PosixFilePermissions.fromString;
import static org.elasticsearch.packaging.util.Docker.assertPermissionsAndOwnership;
import static org.elasticsearch.packaging.util.Docker.copyFromContainer;
import static org.elasticsearch.packaging.util.Docker.ensureImageIsLoaded;
import static org.elasticsearch.packaging.util.Docker.existsInContainer;
import static org.elasticsearch.packaging.util.Docker.removeContainer;
import static org.elasticsearch.packaging.util.Docker.runContainer;
import static org.elasticsearch.packaging.util.Docker.verifyContainerInstallation;
import static org.elasticsearch.packaging.util.Docker.waitForPathToExist;
import static org.elasticsearch.packaging.util.FileMatcher.p660;
import static org.elasticsearch.packaging.util.FileUtils.append;
import static org.elasticsearch.packaging.util.FileUtils.getTempDir;
import static org.elasticsearch.packaging.util.FileUtils.mkdir;
import static org.elasticsearch.packaging.util.FileUtils.rm;
import static org.elasticsearch.packaging.util.ServerUtils.makeRequest;
import static org.elasticsearch.packaging.util.ServerUtils.waitForElasticsearch;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.emptyString;
import static org.junit.Assume.assumeTrue;

public class DockerTests extends PackagingTestCase {
    protected DockerShell sh;

    @BeforeClass
    public static void filterDistros() {
        assumeTrue("only Docker", distribution.packaging == Distribution.Packaging.DOCKER);

        ensureImageIsLoaded(distribution);
    }

    @AfterClass
    public static void cleanup() {
        // runContainer also calls this, so we don't need this method to be annotated as `@After`
        removeContainer();
    }

    @Before
    public void setupTest() throws Exception {
        sh = new DockerShell();
        installation = runContainer(distribution());
    }

    /**
     * Checks that the Docker image can be run, and that it passes various checks.
     */
    public void test10Install() {
        verifyContainerInstallation(installation, distribution());
    }

    /**
     * Checks that no plugins are initially active.
     */
    public void test20PluginsListWithNoPlugins() {
        final Installation.Executables bin = installation.executables();
        final Result r = sh.run(bin.elasticsearchPlugin + " list");

        assertThat("Expected no plugins to be listed", r.stdout, emptyString());
    }

    /**
     * Check that a keystore can be manually created using the provided CLI tool.
     */
    public void test40CreateKeystoreManually() throws InterruptedException {
        final Installation.Executables bin = installation.executables();

        final Path keystorePath = installation.config("elasticsearch.keystore");

        waitForPathToExist(keystorePath);

        // Move the auto-created one out of the way, or else the CLI prompt asks us to confirm
        sh.run("mv " + keystorePath + " " + keystorePath + ".bak");

        sh.run(bin.elasticsearchKeystore + " create");

        final Result r = sh.run(bin.elasticsearchKeystore + " list");
        assertThat(r.stdout, containsString("keystore.seed"));
    }

    /**
     * Send some basic index, count and delete requests, in order to check that the installation
     * is minimally functional.
     */
    public void test50BasicApiTests() throws Exception {
        waitForElasticsearch(installation);

        assertTrue(existsInContainer(installation.logs.resolve("gc.log")));

        ServerUtils.runElasticsearchTests();
    }

    /**
     * Check that the default keystore is automatically created
     */
    public void test60AutoCreateKeystore() throws Exception {
        final Path keystorePath = installation.config("elasticsearch.keystore");

        waitForPathToExist(keystorePath);

        assertPermissionsAndOwnership(keystorePath, p660);

        final Installation.Executables bin = installation.executables();
        final Result result = sh.run(bin.elasticsearchKeystore + " list");
        assertThat(result.stdout, containsString("keystore.seed"));
    }

    /**
     * Check that the default config can be overridden using a bind mount, and that env vars are respected
     */
    public void test70BindMountCustomPathConfAndJvmOptions() throws Exception {
        final Path tempConf = getTempDir().resolve("esconf-alternate");

        try {
            mkdir(tempConf);
            copyFromContainer(installation.config("elasticsearch.yml"), tempConf.resolve("elasticsearch.yml"));
            copyFromContainer(installation.config("log4j2.properties"), tempConf.resolve("log4j2.properties"));

            // we have to disable Log4j from using JMX lest it will hit a security
            // manager exception before we have configured logging; this will fail
            // startup since we detect usages of logging before it is configured
            final String jvmOptions =
                "-Xms512m\n" +
                "-Xmx512m\n" +
                "-Dlog4j2.disable.jmx=true\n";
            append(tempConf.resolve("jvm.options"), jvmOptions);

            // Make the temp directory and contents accessible when bind-mounted
            Files.setPosixFilePermissions(tempConf, fromString("rwxrwxrwx"));

            // Restart the container
            removeContainer();
            runContainer(distribution(), tempConf, Map.of(
                "ES_JAVA_OPTS", "-XX:-UseCompressedOops"
            ));

            waitForElasticsearch(installation);

            final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes"));
            assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
            assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\""));
        } finally {
            rm(tempConf);
        }
    }

    /**
     * Check whether the elasticsearch-certutil tool has been shipped correctly,
     * and if present, that it can execute.
     */
    public void test90SecurityCliPackaging() {
        final Installation.Executables bin = installation.executables();

        final Path securityCli = installation.lib.resolve("tools").resolve("security-cli");

        if (distribution().isDefault()) {
            assertTrue(existsInContainer(securityCli));

            Result result = sh.run(bin.elasticsearchCertutil + " --help");
            assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack"));

            // Ensure that the exit code from the java command is passed back up through the shell script
            result = sh.runIgnoreExitCode(bin.elasticsearchCertutil + " invalid-command");
            assertThat(result.isSuccess(), is(false));
            assertThat(result.stdout, containsString("Unknown command [invalid-command]"));
        } else {
            assertFalse(existsInContainer(securityCli));
        }
    }

    /**
     * Check that the elasticsearch-shard tool is shipped in the Docker image and is executable.
     */
    public void test91ElasticsearchShardCliPackaging() {
        final Installation.Executables bin = installation.executables();

        final Result result = sh.run(bin.elasticsearchShard + " -h");
        assertThat(result.stdout, containsString("A CLI tool to remove corrupted parts of unrecoverable shards"));
    }

    /**
     * Check that the elasticsearch-node tool is shipped in the Docker image and is executable.
     */
    public void test92ElasticsearchNodeCliPackaging() {
        final Installation.Executables bin = installation.executables();

        final Result result = sh.run(bin.elasticsearchNode + " -h");
        assertThat(result.stdout,
            containsString("A CLI tool to do unsafe cluster and index manipulations on current node"));
    }
}
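For reference, the bind-mount scenario in test70 corresponds to a `docker run` invocation along these lines (a sketch; the host path is whatever temp directory the test created):

    docker run --rm --detach \
        --env discovery.type=single-node \
        --env 'ES_JAVA_OPTS=-XX:-UseCompressedOops' \
        --publish 9200:9200 --publish 9300:9300 \
        --volume /tmp/esconf-alternate:/usr/share/elasticsearch/config \
        elasticsearch:test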
@@ -69,11 +69,11 @@ public abstract class PackagingTestCase extends Assert {
     protected static final String systemJavaHome;
     static {
         Shell sh = new Shell();
-        if (Platforms.LINUX) {
-            systemJavaHome = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim();
-        } else {
-            assert Platforms.WINDOWS;
+        if (Platforms.WINDOWS) {
             systemJavaHome = sh.run("$Env:SYSTEM_JAVA_HOME").stdout.trim();
+        } else {
+            assert Platforms.LINUX || Platforms.DARWIN;
+            systemJavaHome = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim();
         }
     }
@@ -33,9 +33,16 @@ public class Distribution {
     public Distribution(Path path) {
         this.path = path;
         String filename = path.getFileName().toString();
-        int lastDot = filename.lastIndexOf('.');
-        String extension = filename.substring(lastDot + 1);
-        this.packaging = Packaging.valueOf(extension.equals("gz") ? "TAR" : extension.toUpperCase(Locale.ROOT));
+
+        if (filename.endsWith(".gz")) {
+            this.packaging = Packaging.TAR;
+        } else if (filename.endsWith(".docker.tar")) {
+            this.packaging = Packaging.DOCKER;
+        } else {
+            int lastDot = filename.lastIndexOf('.');
+            this.packaging = Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT));
+        }

         this.platform = filename.contains("windows") ? Platform.WINDOWS : Platform.LINUX;
         this.flavor = filename.contains("oss") ? Flavor.OSS : Flavor.DEFAULT;
         this.hasJdk = filename.contains("no-jdk") == false;
@@ -62,7 +69,8 @@ public class Distribution {
         TAR(".tar.gz", Platforms.LINUX || Platforms.DARWIN),
         ZIP(".zip", Platforms.WINDOWS),
         DEB(".deb", Platforms.isDPKG()),
-        RPM(".rpm", Platforms.isRPM());
+        RPM(".rpm", Platforms.isRPM()),
+        DOCKER(".docker.tar", Platforms.isDocker());

         /** The extension of this distribution's file */
         public final String extension;
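The ordering of the checks matters: `.docker.tar` has to be tested before the generic last-extension fallback, because that fallback would see a last extension of "tar" and classify the file as a plain TAR. Example classifications (hypothetical version numbers):

    elasticsearch-7.5.0-linux-x86_64.tar.gz  ->  TAR      (".gz" suffix)
    elasticsearch_test.7.5.0.docker.tar      ->  DOCKER   (".docker.tar" checked before the fallback)
    elasticsearch-oss-7.5.0-amd64.deb        ->  DEB      (fallback: Packaging.valueOf of the last extension)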
qa/os/src/test/java/org/elasticsearch/packaging/util/Docker.java (new file): 355 changes
@@ -0,0 +1,355 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.packaging.util;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;

import static java.nio.file.attribute.PosixFilePermissions.fromString;
import static org.elasticsearch.packaging.util.FileMatcher.p644;
import static org.elasticsearch.packaging.util.FileMatcher.p660;
import static org.elasticsearch.packaging.util.FileMatcher.p755;
import static org.elasticsearch.packaging.util.FileMatcher.p775;
import static org.elasticsearch.packaging.util.FileUtils.getCurrentVersion;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Utilities for running packaging tests against the Elasticsearch Docker images.
 */
public class Docker {
    private static final Log logger = LogFactory.getLog(Docker.class);

    private static final Shell sh = new Shell();
    private static final DockerShell dockerShell = new DockerShell();

    /**
     * Tracks the currently running Docker container. An earlier implementation used a fixed container name,
     * but that appeared to cause problems with repeatedly destroying and recreating containers with
     * the same name.
     */
    private static String containerId = null;

    /**
     * Checks whether the required Docker image exists. If not, the image is loaded from disk. No check is made
     * to see whether the image is up-to-date.
     * @param distribution details about the docker image to potentially load.
     */
    public static void ensureImageIsLoaded(Distribution distribution) {
        final long count = sh.run("docker image ls --format '{{.Repository}}' " + distribution.flavor.name).stdout.lines().count();

        if (count != 0) {
            return;
        }

        logger.info("Loading Docker image: " + distribution.path);
        sh.run("docker load -i " + distribution.path);
    }

    /**
     * Runs an Elasticsearch Docker container.
     * @param distribution details about the docker image being tested.
     */
    public static Installation runContainer(Distribution distribution) throws Exception {
        return runContainer(distribution, null, Collections.emptyMap());
    }

    /**
     * Runs an Elasticsearch Docker container, with options for overriding the config directory
     * through a bind mount, and passing additional environment variables.
     *
     * @param distribution details about the docker image being tested.
     * @param configPath the path to the config to bind mount, or null
     * @param envVars environment variables to set when running the container
     */
    public static Installation runContainer(Distribution distribution, Path configPath, Map<String, String> envVars) throws Exception {
        removeContainer();

        final List<String> args = new ArrayList<>();

        args.add("docker run");

        // Remove the container once it exits
        args.add("--rm");

        // Run the container in the background
        args.add("--detach");

        envVars.forEach((key, value) -> args.add("--env " + key + "=\"" + value + "\""));

        // The container won't run without configuring discovery
        args.add("--env discovery.type=single-node");

        // Map ports in the container to the host, so that we can send requests
        args.add("--publish 9200:9200");
        args.add("--publish 9300:9300");

        if (configPath != null) {
            // Bind-mount the config dir, if specified
            args.add("--volume \"" + configPath + ":/usr/share/elasticsearch/config\"");
        }

        args.add(distribution.flavor.name + ":test");

        final String command = String.join(" ", args);
        logger.debug("Running command: " + command);
        containerId = sh.run(command).stdout.trim();

        waitForElasticsearchToStart();

        return Installation.ofContainer();
    }

    /**
     * Waits for the Elasticsearch process to start executing in the container.
     * This is called every time a container is started.
     */
    private static void waitForElasticsearchToStart() throws InterruptedException {
        boolean isElasticsearchRunning = false;
        int attempt = 0;

        do {
            String psOutput = dockerShell.run("ps ax").stdout;

            if (psOutput.contains("/usr/share/elasticsearch/jdk/bin/java -X")) {
                isElasticsearchRunning = true;
                break;
            }

            Thread.sleep(1000);
        } while (attempt++ < 5);

        if (!isElasticsearchRunning) {
            final String logs = sh.run("docker logs " + containerId).stdout;
            fail("Elasticsearch container did not start successfully.\n\n" + logs);
        }
    }

    /**
     * Removes the currently running container.
     */
    public static void removeContainer() {
        if (containerId != null) {
            try {
                // Remove the container, forcibly killing it if necessary
                logger.debug("Removing container " + containerId);
                final String command = "docker rm -f " + containerId;
                final Shell.Result result = sh.runIgnoreExitCode(command);

                if (result.isSuccess() == false) {
                    // I'm not sure why we're already removing this container, but that's OK.
                    if (result.stderr.contains("removal of container " + containerId + " is already in progress") == false) {
                        throw new RuntimeException(
                            "Command was not successful: [" + command + "] result: " + result.toString());
                    }
                }
            } finally {
                // Null out the containerId under all circumstances, so that even if the remove command fails
                // for some reason, the other tests will still proceed. Otherwise they can get stuck, continually
                // trying to remove a non-existent container ID.
                containerId = null;
            }
        }
    }

    /**
     * Copies a file from the container into the local filesystem
     * @param from the file to copy in the container
     * @param to the location to place the copy
     */
    public static void copyFromContainer(Path from, Path to) {
        final String script = "docker cp " + containerId + ":" + from + " " + to;
        logger.debug("Copying file from container with: " + script);
        sh.run(script);
    }

    /**
     * Extends {@link Shell} so that executed commands happen in the currently running Docker container.
     */
    public static class DockerShell extends Shell {
        @Override
        protected String[] getScriptCommand(String script) {
            assert containerId != null;

            return super.getScriptCommand("docker exec " +
                "--user elasticsearch:root " +
                "--tty " +
                containerId + " " +
                script);
        }
    }

    /**
     * Checks whether a path exists in the Docker container.
     */
    public static boolean existsInContainer(Path path) {
        logger.debug("Checking whether file " + path + " exists in container");
        final Shell.Result result = dockerShell.runIgnoreExitCode("test -e " + path);

        return result.isSuccess();
    }

    /**
     * Checks that the specified path's permissions and ownership match those specified.
     */
    public static void assertPermissionsAndOwnership(Path path, Set<PosixFilePermission> expectedPermissions) {
        logger.debug("Checking permissions and ownership of [" + path + "]");

        final String[] components = dockerShell.run("stat --format=\"%U %G %A\" " + path).stdout.split("\\s+");

        final String username = components[0];
        final String group = components[1];
        final String permissions = components[2];

        // The final substring() is because we don't check the directory bit, and we
        // also don't want any SELinux security context indicator.
        Set<PosixFilePermission> actualPermissions = fromString(permissions.substring(1, 10));

        assertEquals("Permissions of " + path + " are wrong", expectedPermissions, actualPermissions);
        assertThat("File owner of " + path + " is wrong", username, equalTo("elasticsearch"));
        assertThat("File group of " + path + " is wrong", group, equalTo("root"));
    }

    /**
     * Waits for up to 5 seconds for a path to exist in the container.
     */
    public static void waitForPathToExist(Path path) throws InterruptedException {
        int attempt = 0;

        do {
            if (existsInContainer(path)) {
                return;
            }

            Thread.sleep(500);
        } while (attempt++ < 10);

        fail(path + " failed to exist after 5000ms");
    }

    /**
     * Perform a variety of checks on an installation. If the current distribution is not OSS, additional checks are carried out.
     */
    public static void verifyContainerInstallation(Installation installation, Distribution distribution) {
        verifyOssInstallation(installation);
        if (distribution.flavor == Distribution.Flavor.DEFAULT) {
            verifyDefaultInstallation(installation);
        }
    }

    private static void verifyOssInstallation(Installation es) {
        dockerShell.run("id elasticsearch");
        dockerShell.run("getent group elasticsearch");

        final Shell.Result passwdResult = dockerShell.run("getent passwd elasticsearch");
        final String homeDir = passwdResult.stdout.trim().split(":")[5];
        assertThat(homeDir, equalTo("/usr/share/elasticsearch"));

        Stream.of(
            es.home,
            es.data,
            es.logs,
            es.config
        ).forEach(dir -> assertPermissionsAndOwnership(dir, p775));

        Stream.of(
            es.plugins,
            es.modules
        ).forEach(dir -> assertPermissionsAndOwnership(dir, p755));

        // FIXME these files should all have the same permissions
        Stream.of(
            "elasticsearch.keystore",
            // "elasticsearch.yml",
            "jvm.options"
            // "log4j2.properties"
        ).forEach(configFile -> assertPermissionsAndOwnership(es.config(configFile), p660));

        Stream.of(
            "elasticsearch.yml",
            "log4j2.properties"
        ).forEach(configFile -> assertPermissionsAndOwnership(es.config(configFile), p644));

        assertThat(
            dockerShell.run(es.bin("elasticsearch-keystore") + " list").stdout,
            containsString("keystore.seed"));

        Stream.of(
            es.bin,
            es.lib
        ).forEach(dir -> assertPermissionsAndOwnership(dir, p755));

        Stream.of(
            "elasticsearch",
            "elasticsearch-cli",
            "elasticsearch-env",
            "elasticsearch-keystore",
            "elasticsearch-node",
            "elasticsearch-plugin",
            "elasticsearch-shard"
        ).forEach(executable -> assertPermissionsAndOwnership(es.bin(executable), p755));

        Stream.of(
            "LICENSE.txt",
            "NOTICE.txt",
            "README.textile"
        ).forEach(doc -> assertPermissionsAndOwnership(es.home.resolve(doc), p644));
    }

    private static void verifyDefaultInstallation(Installation es) {
        Stream.of(
            "elasticsearch-certgen",
            "elasticsearch-certutil",
            "elasticsearch-croneval",
            "elasticsearch-saml-metadata",
            "elasticsearch-setup-passwords",
            "elasticsearch-sql-cli",
            "elasticsearch-syskeygen",
            "elasticsearch-users",
            "x-pack-env",
            "x-pack-security-env",
            "x-pack-watcher-env"
        ).forEach(executable -> assertPermissionsAndOwnership(es.bin(executable), p755));

        // at this time we only install the current version of archive distributions, but if that changes we'll need to pass
        // the version through here
        assertPermissionsAndOwnership(es.bin("elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), p755);

        Stream.of(
            "role_mapping.yml",
            "roles.yml",
            "users",
            "users_roles"
        ).forEach(configFile -> assertPermissionsAndOwnership(es.config(configFile), p660));
    }
}
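`DockerShell` only changes how a script is wrapped: every command is prefixed so that it executes inside the current container, then handed to the ordinary `bash -c` path. A call such as `sh.run("ps ax")` therefore ends up as roughly the following (container ID hypothetical):

    bash -c 'docker exec --user elasticsearch:root --tty 3f1b4ab02e6f ps ax'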
@@ -45,6 +45,7 @@ public class FileMatcher extends TypeSafeMatcher<Path> {

     public enum Fileness { File, Directory }

+    public static final Set<PosixFilePermission> p775 = fromString("rwxrwxr-x");
     public static final Set<PosixFilePermission> p755 = fromString("rwxr-xr-x");
     public static final Set<PosixFilePermission> p750 = fromString("rwxr-x---");
     public static final Set<PosixFilePermission> p660 = fromString("rw-rw----");
@@ -84,6 +84,20 @@ public class Installation {
         );
     }

+    public static Installation ofContainer() {
+        String root = "/usr/share/elasticsearch";
+        return new Installation(
+            Paths.get(root),
+            Paths.get(root + "/config"),
+            Paths.get(root + "/data"),
+            Paths.get(root + "/logs"),
+            Paths.get(root + "/plugins"),
+            Paths.get(root + "/modules"),
+            null,
+            null
+        );
+    }
+
     public Path bin(String executableName) {
         return bin.resolve(executableName);
     }
@@ -65,6 +65,10 @@ public class Platforms {
         return new Shell().runIgnoreExitCode("which service").isSuccess();
     }

+    public static boolean isDocker() {
+        return new Shell().runIgnoreExitCode("which docker").isSuccess();
+    }
+
     public static void onWindows(PlatformAction action) throws Exception {
         if (WINDOWS) {
             action.run();
@@ -93,7 +93,8 @@ public class Shell {
         String formattedCommand = String.format(Locale.ROOT, command, args);
         return run(formattedCommand);
     }
-    private String[] getScriptCommand(String script) {
+
+    protected String[] getScriptCommand(String script) {
         if (Platforms.WINDOWS) {
             return powershellCommand(script);
         } else {
@@ -102,11 +103,11 @@ public class Shell {
     }

     private static String[] bashCommand(String script) {
-        return Stream.concat(Stream.of("bash", "-c"), Stream.of(script)).toArray(String[]::new);
+        return new String[] { "bash", "-c", script };
     }

     private static String[] powershellCommand(String script) {
-        return Stream.concat(Stream.of("powershell.exe", "-Command"), Stream.of(script)).toArray(String[]::new);
+        return new String[] { "powershell.exe", "-Command", script };
     }

     private Result runScript(String[] command) {
@@ -30,6 +30,8 @@ List projects = [
   'distribution:docker',
   'distribution:docker:oss-docker-build-context',
   'distribution:docker:docker-build-context',
+  'distribution:docker:oss-docker-export',
+  'distribution:docker:docker-export',
   'distribution:packages:oss-deb',
   'distribution:packages:deb',
   'distribution:packages:oss-no-jdk-deb',