Build Docker images from exported contexts and test them (#70088)

Closes #69930. Closes #69928.

The ES build currently has 2 types of Docker output - Docker images,
and Docker build contexts. At the moment, only the images are tested,
meaning that bugs in the build contexts can go unnoticed.

This PR changes how we create Docker images so that we first create
the build contexts, and then build the images using them. This does
require some sleight-of-hand - the build contexts expect to download
an Elasticsearch archive directly from the `Dockerfile`, which
will only ever work for non-snapshot version builds. In order to
get around this, the `Dockerfile` is modified to `COPY` in a local
archive file. Any other dependency files must exist in the build
context archive.

This PR also builds and tests the Iron Bank context. We do not
currently build a Docker image for this at all, and to build an
image requires us to set some build arguments to useful values. We
also need to provide all artifacts to the build, as the `Dockerfile`
cannot download anything. As a result, the `:distribution:docker`
project now defines a GitHub repository so that Gradle will download
a `tini` binary.

Note that there will need to be corresponding changes to
`release-manager`.
This commit is contained in:
Rory Hunter 2021-03-26 13:58:03 +00:00 committed by GitHub
parent fa93666b6a
commit 1c5b7653f0
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 389 additions and 273 deletions

View file

@ -12,19 +12,27 @@ package org.elasticsearch.gradle;
* This class models the different Docker base images that are used to build Docker distributions of Elasticsearch.
*/
public enum DockerBase {
    // Default image has no tag suffix on the published Elasticsearch image name.
    CENTOS("centos:8", ""),

    // "latest" here is intentional, since the image name specifies "8"
    UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi8"),

    // The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build.
    // The ${...} placeholders are Docker build args, substituted at image-build time.
    IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank");

    /** Docker base image reference (possibly containing build-arg placeholders). */
    private final String image;

    /** Suffix appended to the Elasticsearch image name for this variant (may be empty). */
    private final String suffix;

    DockerBase(String image, String suffix) {
        this.image = image;
        this.suffix = suffix;
    }

    public String getImage() {
        return image;
    }

    public String getSuffix() {
        return suffix;
    }
}

View file

@ -42,7 +42,9 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
DEB,
DOCKER,
// This is a different flavour of Docker image
DOCKER_UBI;
DOCKER_UBI,
// Like UBI, but hardened and with a parameterized Docker build.
DOCKER_IRON_BANK;
@Override
public String toString() {
@ -54,6 +56,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
case DEB:
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
case RPM:
return false;
@ -61,6 +64,18 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
return true;
}
}
public boolean isDocker() {
switch (this) {
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
return true;
default:
return false;
}
}
}
// package private so tests can use
@ -141,8 +156,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
}
public boolean isDocker() {
final Type type = this.type.get();
return type == Type.DOCKER || type == Type.DOCKER_UBI;
return this.type.get().isDocker();
}
public void setBundledJdk(Boolean bundledJdk) {
@ -194,6 +208,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
case DEB:
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
case RPM:
throw new UnsupportedOperationException(
"distribution type [" + getType() + "] for " + "elasticsearch distribution [" + name + "] cannot be extracted"

View file

@ -14,7 +14,9 @@ import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.ListProperty;
import org.gradle.api.provider.MapProperty;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
@ -35,17 +37,22 @@ public class DockerBuildTask extends DefaultTask {
private static final Logger LOGGER = Logging.getLogger(DockerBuildTask.class);
private final WorkerExecutor workerExecutor;
private final RegularFileProperty markerFile = getProject().getObjects().fileProperty();
private final DirectoryProperty dockerContext = getProject().getObjects().directoryProperty();
private final RegularFileProperty markerFile;
private final DirectoryProperty dockerContext;
private String[] tags;
private boolean pull = true;
private boolean noCache = true;
private String[] baseImages;
private MapProperty<String, String> buildArgs;
@Inject
public DockerBuildTask(WorkerExecutor workerExecutor) {
public DockerBuildTask(WorkerExecutor workerExecutor, ObjectFactory objectFactory) {
this.workerExecutor = workerExecutor;
this.markerFile = objectFactory.fileProperty();
this.dockerContext = objectFactory.directoryProperty();
this.buildArgs = objectFactory.mapProperty(String.class, String.class);
this.markerFile.set(getProject().getLayout().getBuildDirectory().file("markers/" + this.getName() + ".marker"));
}
@ -57,7 +64,8 @@ public class DockerBuildTask extends DefaultTask {
params.getTags().set(Arrays.asList(tags));
params.getPull().set(pull);
params.getNoCache().set(noCache);
params.getBaseImages().set(baseImages);
params.getBaseImages().set(Arrays.asList(baseImages));
params.getBuildArgs().set(buildArgs);
});
}
@ -103,6 +111,15 @@ public class DockerBuildTask extends DefaultTask {
this.baseImages = baseImages;
}
@Input
public MapProperty<String, String> getBuildArgs() {
return buildArgs;
}
public void setBuildArgs(MapProperty<String, String> buildArgs) {
this.buildArgs = buildArgs;
}
@OutputFile
public RegularFileProperty getMarkerFile() {
return markerFile;
@ -147,9 +164,7 @@ public class DockerBuildTask extends DefaultTask {
final Parameters parameters = getParameters();
if (parameters.getPull().get()) {
for (String baseImage : parameters.getBaseImages().get()) {
pullBaseImage(baseImage);
}
parameters.getBaseImages().get().forEach(this::pullBaseImage);
}
LoggedExec.exec(execOperations, spec -> {
@ -162,6 +177,8 @@ public class DockerBuildTask extends DefaultTask {
}
parameters.getTags().get().forEach(tag -> spec.args("--tag", tag));
parameters.getBuildArgs().get().forEach((k, v) -> spec.args("--build-arg", k + "=" + v));
});
try {
@ -183,6 +200,8 @@ public class DockerBuildTask extends DefaultTask {
Property<Boolean> getNoCache();
Property<String[]> getBaseImages();
ListProperty<String> getBaseImages();
MapProperty<String, String> getBuildArgs();
}
}

View file

@ -114,6 +114,7 @@ public class InternalDistributionDownloadPlugin implements InternalPlugin {
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
projectPath += ":docker:";
projectPath += distributionProjectName(distribution);
break;
@ -153,24 +154,22 @@ public class InternalDistributionDownloadPlugin implements InternalPlugin {
switch (distribution.getType()) {
case ARCHIVE:
projectName += platform.toString() + archString + (platform == ElasticsearchDistribution.Platform.WINDOWS
return projectName + platform.toString() + archString + (platform == ElasticsearchDistribution.Platform.WINDOWS
? "-zip"
: "-tar");
break;
case DOCKER:
projectName += "docker" + archString + "-export";
break;
return projectName + "docker" + archString + "-export";
case DOCKER_UBI:
projectName += "ubi-docker" + archString + "-export";
break;
return projectName + "ubi-docker" + archString + "-export";
case DOCKER_IRON_BANK:
return projectName + "ironbank-docker" + archString + "-export";
default:
projectName += distribution.getType();
break;
return projectName + distribution.getType();
}
return projectName;
}
private static class ProjectBasedDistributionDependency implements DistributionDependency {

View file

@ -53,6 +53,9 @@ import java.util.stream.Stream;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath;
/**
* This class defines gradle tasks for testing our various distribution artifacts.
*/
public class DistroTestPlugin implements Plugin<Project> {
private static final String SYSTEM_JDK_VERSION = "11.0.2+9";
private static final String SYSTEM_JDK_VENDOR = "openjdk";
@ -97,6 +100,7 @@ public class DistroTestPlugin implements Plugin<Project> {
Map<Type, List<TaskProvider<Test>>> linuxTestTasks = new HashMap<>();
Map<String, List<TaskProvider<Test>>> upgradeTestTasks = new HashMap<>();
Map<String, TaskProvider<?>> depsTasks = new HashMap<>();
for (ElasticsearchDistribution distribution : testDistributions) {
String taskname = destructiveDistroTestTaskName(distribution);
TaskProvider<?> depsTask = project.getTasks().register(taskname + "#deps");
@ -196,8 +200,7 @@ public class DistroTestPlugin implements Plugin<Project> {
// auto-detection doesn't work.
//
// The shouldTestDocker property could be null, hence we use Boolean.TRUE.equals()
boolean shouldExecute = (type != Type.DOCKER && type != Type.DOCKER_UBI)
|| Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker"));
boolean shouldExecute = (type.isDocker()) || Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker"));
if (shouldExecute) {
distroTest.configure(t -> t.dependsOn(wrapperTask));
@ -223,7 +226,8 @@ public class DistroTestPlugin implements Plugin<Project> {
private static Map<ElasticsearchDistribution.Type, TaskProvider<?>> lifecycleTasks(Project project, String taskPrefix) {
Map<ElasticsearchDistribution.Type, TaskProvider<?>> lifecyleTasks = new HashMap<>();
lifecyleTasks.put(Type.DOCKER, project.getTasks().register(taskPrefix + ".docker"));
lifecyleTasks.put(Type.DOCKER_UBI, project.getTasks().register(taskPrefix + ".ubi"));
lifecyleTasks.put(Type.DOCKER_UBI, project.getTasks().register(taskPrefix + ".docker-ubi"));
lifecyleTasks.put(Type.DOCKER_IRON_BANK, project.getTasks().register(taskPrefix + ".docker-ironbank"));
lifecyleTasks.put(Type.ARCHIVE, project.getTasks().register(taskPrefix + ".archives"));
lifecyleTasks.put(Type.DEB, project.getTasks().register(taskPrefix + ".packages"));
lifecyleTasks.put(Type.RPM, lifecyleTasks.get(Type.DEB));
@ -361,7 +365,7 @@ public class DistroTestPlugin implements Plugin<Project> {
List<ElasticsearchDistribution> currentDistros = new ArrayList<>();
for (Architecture architecture : Architecture.values()) {
for (Type type : List.of(Type.DEB, Type.RPM, Type.DOCKER, Type.DOCKER_UBI)) {
Arrays.stream(Type.values()).filter(t -> t != Type.INTEG_TEST_ZIP && t != Type.ARCHIVE).forEach(type -> {
for (boolean bundledJdk : Arrays.asList(true, false)) {
if (bundledJdk == false) {
// We'll never publish an ARM (aarch64) build without a bundled JDK.
@ -369,7 +373,7 @@ public class DistroTestPlugin implements Plugin<Project> {
continue;
}
// All our Docker images include a bundled JDK so it doesn't make sense to test without one.
if (type == Type.DOCKER || type == Type.DOCKER_UBI) {
if (type.isDocker()) {
continue;
}
}
@ -377,7 +381,7 @@ public class DistroTestPlugin implements Plugin<Project> {
createDistro(distributions, architecture, type, null, bundledJdk, VersionProperties.getElasticsearch())
);
}
}
});
}
for (Architecture architecture : Architecture.values()) {
@ -408,7 +412,7 @@ public class DistroTestPlugin implements Plugin<Project> {
String version
) {
String name = distroId(type, platform, bundledJdk, architecture) + "-" + version;
boolean isDocker = type == Type.DOCKER || type == Type.DOCKER_UBI;
boolean isDocker = type.isDocker();
ElasticsearchDistribution distro = distributions.create(name, d -> {
d.setArchitecture(architecture);
d.setType(type);

View file

@ -14,40 +14,41 @@ apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.internal-distribution-download'
apply plugin: 'elasticsearch.rest-resources'
// Define a repository that allows Gradle to fetch a resource from GitHub. This
// is only used to fetch the `tini` binary, when building the Iron Bank docker image
// for testing purposes.
repositories {
ivy {
url 'https://github.com/'
patternLayout {
artifact '/[organisation]/[module]/releases/download/v[revision]/[ext]'
}
// This is required in Gradle 6.0+ as metadata file (ivy.xml)
// is mandatory. Docs linked below this code section
metadataSources { artifact() }
}
}
testFixtures.useFixture()
configurations {
aarch64DockerSource
dockerSource
transformLog4jJar
tini
}
dependencies {
aarch64DockerSource project(path: ":distribution:archives:linux-aarch64-tar", configuration: 'default')
dockerSource project(path: ":distribution:archives:linux-tar", configuration: 'default')
transformLog4jJar project(path: ":distribution:docker:transform-log4j-config", configuration: 'default')
tini 'krallin:tini:0.19.0@tini-amd64'
}
ext.expansions = { Architecture architecture, DockerBase base, boolean local ->
String classifier
if (local) {
if (architecture == Architecture.AARCH64) {
classifier = "linux-aarch64"
} else if (architecture == Architecture.X64) {
classifier = "linux-x86_64"
} else {
throw new IllegalArgumentException("Unsupported architecture [" + architecture + "]")
}
} else {
/* When sourcing the Elasticsearch build remotely, the same Dockerfile needs
* to be able to fetch the artifact for any supported platform. We can't make
* the decision here. Bash will interpolate the `arch` command for us. */
classifier = "linux-\$(arch)"
}
final String elasticsearch = "elasticsearch-${VersionProperties.elasticsearch}-${classifier}.tar.gz"
String buildArgs = '#'
ext.expansions = { Architecture architecture, DockerBase base ->
String buildArgs = ''
if (base == DockerBase.IRON_BANK) {
buildArgs = """
ARG BASE_REGISTRY=nexus-docker-secure.levelup-nexus.svc.cluster.local:18082
@ -56,21 +57,6 @@ ARG BASE_TAG=8.3
"""
}
/* Both the following Dockerfile commands put the resulting artifact at
* the same location, regardless of classifier, so that the commands that
* follow in the Dockerfile don't have to know about the runtime
* architecture. */
String sourceElasticsearch
if (local) {
sourceElasticsearch = "COPY $elasticsearch /opt/elasticsearch.tar.gz"
} else {
sourceElasticsearch = """
RUN curl --retry 10 -S -L \\
--output /opt/elasticsearch.tar.gz \\
https://artifacts-no-kpi.elastic.co/downloads/elasticsearch/$elasticsearch
""".trim()
}
def (major,minor) = VersionProperties.elasticsearch.split("\\.")
return [
@ -82,7 +68,6 @@ RUN curl --retry 10 -S -L \\
'git_revision' : BuildParams.gitRevision,
'license' : 'Elastic-License-2.0',
'package_manager' : base == DockerBase.UBI ? 'microdnf' : 'yum',
'source_elasticsearch': sourceElasticsearch,
'docker_base' : base.name().toLowerCase(),
'version' : VersionProperties.elasticsearch,
'major_minor_version' : "${major}.${minor}",
@ -100,14 +85,6 @@ class SquashNewlinesFilter extends FilterReader {
}
}
private static String buildPath(Architecture architecture, DockerBase base) {
return 'build/' +
(architecture == Architecture.AARCH64 ? 'aarch64-' : '') +
(base == DockerBase.UBI ? 'ubi-' : '') +
(base == DockerBase.UBI ? 'ubi-' : (base == DockerBase.IRON_BANK ? 'ironbank-' : '')) +
'docker'
}
private static String taskName(String prefix, Architecture architecture, DockerBase base, String suffix) {
return prefix +
(architecture == Architecture.AARCH64 ? 'Aarch64' : '') +
@ -115,16 +92,15 @@ private static String taskName(String prefix, Architecture architecture, DockerB
suffix
}
project.ext {
dockerBuildContext = { Architecture architecture, DockerBase base, boolean local ->
ext.dockerBuildContext = { Architecture architecture, DockerBase base ->
copySpec {
final Map<String,String> varExpansions = expansions(architecture, base, local)
final Map<String,String> varExpansions = expansions(architecture, base)
final Path projectDir = project.projectDir.toPath()
if (base == DockerBase.IRON_BANK) {
into('scripts') {
from projectDir.resolve("src/docker/bin")
from(projectDir.resolve("src/docker/config"))
from projectDir.resolve("src/docker/config")
}
from(projectDir.resolve("src/docker/iron_bank")) {
expand(varExpansions)
@ -145,28 +121,6 @@ project.ext {
}
}
}
}
void addCopyDockerContextTask(Architecture architecture, DockerBase base) {
tasks.register(taskName("copy", architecture, base, "DockerContext"), Sync) {
expansions(architecture, base, true).findAll { it.key != 'build_date' }.each { k, v ->
inputs.property(k, { v.toString() })
}
into buildPath(architecture, base)
with dockerBuildContext(architecture, base, true)
into('bin') {
from configurations.transformLog4jJar
}
if (architecture == Architecture.AARCH64) {
from configurations.aarch64DockerSource
} else {
from configurations.dockerSource
}
}
}
def createAndSetWritable(Object... locations) {
locations.each { location ->
@ -211,7 +165,6 @@ tasks.named("preProcessFixture").configure {
)
createAndSetWritable(
"${buildDir}/repo",
"${buildDir}/oss-repo",
"${buildDir}/logs/default-1",
"${buildDir}/logs/default-2",
)
@ -236,45 +189,6 @@ tasks.named("check").configure {
dependsOn "integTest"
}
void addBuildDockerImage(Architecture architecture, DockerBase base) {
final TaskProvider<DockerBuildTask> buildDockerImageTask =
tasks.register(taskName("build", architecture, base, "DockerImage"), DockerBuildTask) {
onlyIf { Architecture.current() == architecture }
TaskProvider<Sync> copyContextTask = tasks.named(taskName("copy", architecture, base, "DockerContext"))
dependsOn(copyContextTask)
dockerContext.fileProvider(copyContextTask.map { it.destinationDir })
if (base == DockerBase.UBI) {
baseImages = [ base.getImage() ]
} else {
baseImages = [ base.getImage(), 'alpine:latest' ]
}
String version = VersionProperties.elasticsearch
String suffix = base == DockerBase.UBI ? '-ubi8' : ''
tags = [
"elasticsearch${suffix}:${version}",
"docker.elastic.co/elasticsearch/elasticsearch${suffix}:${version}",
"docker.elastic.co/elasticsearch/elasticsearch-full${suffix}:${version}",
"elasticsearch${suffix}:test"
]
}
tasks.named("assemble").configure {
dependsOn(buildDockerImageTask)
}
}
for (final Architecture architecture : Architecture.values()) {
for (final DockerBase base : DockerBase.values()) {
if (base == DockerBase.IRON_BANK) {
// At the moment we don't actually build the Iron Bank image
continue
}
addCopyDockerContextTask(architecture, base)
addBuildDockerImage(architecture, base)
}
}
// We build the images used in compose locally, but the pull command insists on using a repository
// thus we must disable it to prevent it from doing so.
// Everything will still be pulled since we will build the local images on a pull
@ -282,6 +196,124 @@ tasks.named("composePull").configure {
enabled = false
}
void addBuildDockerContextTask(Architecture architecture, DockerBase base) {
String binDirectory = base == DockerBase.IRON_BANK ? 'scripts' : 'bin'
String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
final TaskProvider<DockerBuildTask> buildContextTask =
tasks.register(taskName('build', architecture, base, 'DockerContext'), Tar) {
archiveExtension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context${arch}"
archiveBaseName = "elasticsearch${base.suffix}"
with dockerBuildContext(architecture, base)
into (binDirectory) {
from configurations.transformLog4jJar
}
onlyIf { Architecture.current() == architecture }
}
}
void addUnpackDockerContextTask(Architecture architecture, DockerBase base) {
tasks.register(taskName("transform", architecture, base, "DockerContext"), Sync) {
TaskProvider<Tar> buildContextTask = tasks.named(taskName("build", architecture, base, "DockerContext"))
dependsOn(buildContextTask)
String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
String archiveName = "elasticsearch${base.suffix}-${VersionProperties.elasticsearch}-docker-build-context${arch}"
String distributionName = "elasticsearch-${VersionProperties.elasticsearch}-linux-${architecture.classifier}.tar.gz"
from(tarTree("${project.buildDir}/distributions/${archiveName}.tar.gz")) {
eachFile { FileCopyDetails details ->
if (details.name.equals("Dockerfile")) {
filter { it.replaceAll('^RUN curl.*artifacts-no-kpi.*$', "COPY ${distributionName} /opt/elasticsearch.tar.gz")}
}
}
}
into "${project.buildDir}/docker-context/${archiveName}"
// Since we replaced the remote URL in the Dockerfile, copy in the required file
if (architecture == Architecture.AARCH64) {
from configurations.aarch64DockerSource
} else {
from configurations.dockerSource
}
if (base == DockerBase.IRON_BANK) {
from (configurations.tini) {
rename { _ -> 'tini' }
}
}
expansions(architecture, base).findAll { it.key != 'build_date' }.each { k, v ->
inputs.property(k, { v.toString() })
}
onlyIf { Architecture.current() == architecture }
}
}
private List<String> generateTags(DockerBase base) {
String version = VersionProperties.elasticsearch
return [
"elasticsearch${base.suffix}:test",
"elasticsearch${base.suffix}:${version}",
"docker.elastic.co/elasticsearch/elasticsearch${base.suffix}:${version}"
]
}
void addBuildDockerImageTask(Architecture architecture, DockerBase base) {
final TaskProvider<DockerBuildTask> buildDockerImageTask =
tasks.register(taskName("build", architecture, base, "DockerImage"), DockerBuildTask) {
TaskProvider<Tar> transformTask = tasks.named(taskName("transform", architecture, base, "DockerContext"))
dependsOn(transformTask)
dockerContext.fileProvider(transformTask.map { it.destinationDir })
tags = generateTags(base)
if (base == DockerBase.IRON_BANK) {
Map<String, String> buildArgsMap = [
'BASE_REGISTRY': 'docker.elastic.co',
'BASE_IMAGE': 'ubi8/ubi',
'BASE_TAG': 'latest'
]
// Iron Bank has a single, parameterized base image
String baseImage = base.image
for (String key : buildArgsMap.keySet()) {
baseImage = baseImage.replace('${' + key + '}', buildArgsMap.get(key))
}
baseImages = [baseImage]
buildArgs = buildArgsMap
} else if (base == DockerBase.CENTOS) {
baseImages = ['alpine:latest', base.image]
} else {
baseImages = [base.image]
}
onlyIf { Architecture.current() == architecture }
}
tasks.named("assemble").configure {
dependsOn(buildDockerImageTask)
}
}
for (final Architecture architecture : Architecture.values()) {
for (final DockerBase base : DockerBase.values()) {
addBuildDockerContextTask(architecture, base)
addUnpackDockerContextTask(architecture, base)
addBuildDockerImageTask(architecture, base)
}
}
/*
* The export subprojects write out the generated Docker images to disk, so
* that they can be easily reloaded, for example into a VM for distribution testing
@ -291,17 +323,19 @@ subprojects { Project subProject ->
apply plugin: 'distribution'
final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64
// We can ignore Iron Bank at the moment as we don't
// build those images ourselves.
final DockerBase base = subProject.name.contains('ubi-') ? DockerBase.UBI : DockerBase.CENTOS
DockerBase base = DockerBase.CENTOS
if (subProject.name.contains('ubi-')) {
base = DockerBase.UBI
} else if (subProject.name.contains('ironbank-')) {
base = DockerBase.IRON_BANK
}
final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
final String suffix = base == DockerBase.UBI ? '-ubi8' : ''
final String extension = base == DockerBase.UBI ? 'ubi.tar' : 'docker.tar'
final String artifactName = "elasticsearch${arch}${suffix}_test"
final String extension = base == DockerBase.UBI ? 'ubi.tar' : (base == DockerBase.IRON_BANK ? 'ironbank.tar' : 'docker.tar')
final String artifactName = "elasticsearch${arch}${base.suffix}_test"
final String exportTaskName = taskName("export", architecture, base, "DockerImage")
final String buildTaskName = taskName("build", architecture, base, "DockerImage")
final String exportTaskName = taskName("export", architecture, base, 'DockerImage')
final String buildTaskName = taskName('build', architecture, base, 'DockerImage')
final String tarFile = "${parent.projectDir}/build/${artifactName}_${VersionProperties.elasticsearch}.${extension}"
tasks.register(exportTaskName, LoggedExec) {
@ -311,7 +345,7 @@ subprojects { Project subProject ->
args "save",
"-o",
tarFile,
"elasticsearch${suffix}:test"
"elasticsearch${base.suffix}:test"
dependsOn(parent.path + ":" + buildTaskName)
onlyIf { Architecture.current() == architecture }

View file

@ -1,26 +0,0 @@
// Builds the CentOS-flavoured Docker build context archive for non-local
// (release) builds, where the Dockerfile downloads the Elasticsearch
// archive itself rather than COPYing a local file.
import org.elasticsearch.gradle.DockerBase
apply plugin: 'base'
configurations {
// Carries the transformed log4j config jar produced by the sibling project.
transformLog4jJar
}
dependencies {
transformLog4jJar project(path: ":distribution:docker:transform-log4j-config", configuration: 'default')
}
// Package the Docker build context as a gzipped tarball.
tasks.register("buildDockerBuildContext", Tar) {
archiveExtension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch"
// Non-local builds don't need to specify an architecture.
// `dockerBuildContext` is a copySpec factory defined on the parent
// :distribution:docker project's `ext`.
with dockerBuildContext(null, DockerBase.CENTOS, false)
into('bin') {
from configurations.transformLog4jJar
}
}
tasks.named("assemble").configure {dependsOn "buildDockerBuildContext"}

View file

@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

View file

@ -1,26 +0,0 @@
// Builds the Iron Bank Docker build context archive. Unlike the other
// variants, Iron Bank builds are always treated as "local" builds.
import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.DockerBase
apply plugin: 'base'
configurations {
// Carries the transformed log4j config jar produced by the sibling project.
transformLog4jJar
}
dependencies {
transformLog4jJar project(path: ":distribution:docker:transform-log4j-config", configuration: 'default')
}
// Package the Docker build context as a gzipped tarball.
tasks.register("buildIronBankDockerBuildContext", Tar) {
archiveExtension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-ironbank"
// We supply an architecture here because we always treat Iron Bank
// builds as local - because that is how they are built.
// `dockerBuildContext` is a copySpec factory defined on the parent
// :distribution:docker project's `ext`.
with dockerBuildContext(Architecture.X64, DockerBase.IRON_BANK, true)
// Iron Bank contexts place support files under `scripts/` rather than `bin/`.
into('scripts') {
from configurations.transformLog4jJar
}
}

View file

@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

View file

@ -222,8 +222,12 @@ FROM ${base_image} AS builder
RUN mkdir /usr/share/elasticsearch
WORKDIR /usr/share/elasticsearch
<% /* Fetch or copy the appropriate Elasticsearch distribution for this architecture */ %>
${source_elasticsearch}
<% /*
Fetch the appropriate Elasticsearch distribution for this architecture.
Keep this command on one line - it is replaced with a `COPY` during local builds.
It uses the `arch` command to fetch the correct distro for the build machine.
*/ %>
RUN curl --retry 10 -S -L --output /opt/elasticsearch.tar.gz https://artifacts-no-kpi.elastic.co/downloads/elasticsearch/elasticsearch-${version}-linux-\$(arch).tar.gz
RUN tar -zxf /opt/elasticsearch.tar.gz --strip-components=1
@ -281,7 +285,7 @@ RUN <%= retry.loop(
%>
RUN ${package_manager} update --setopt=tsflags=nodocs -y && \\
${package_manager} install --setopt=tsflags=nodocs -y \\
nc shadow-utils zip unzip && \\
nc shadow-utils zip findutils unzip procps-ng && \\
${package_manager} clean all
<% } %>

View file

@ -1,27 +0,0 @@
// Builds the UBI-flavoured Docker build context archive for non-local
// (release) builds, where the Dockerfile downloads the Elasticsearch
// archive itself rather than COPYing a local file.
import org.elasticsearch.gradle.DockerBase
apply plugin: 'base'
configurations {
// Carries the transformed log4j config jar produced by the sibling project.
transformLog4jJar
}
dependencies {
transformLog4jJar project(path: ":distribution:docker:transform-log4j-config", configuration: 'default')
}
// Package the Docker build context as a gzipped tarball.
tasks.register("buildUbiDockerBuildContext", Tar) {
archiveExtension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-ubi8"
// Non-local builds don't need to specify an architecture.
// `dockerBuildContext` is a copySpec factory defined on the parent
// :distribution:docker project's `ext`.
with dockerBuildContext(null, DockerBase.UBI, false)
into('bin') {
from configurations.transformLog4jJar
}
}
tasks.named("assemble").configure { dependsOn("buildUbiDockerBuildContext") }

View file

@ -11,7 +11,7 @@ package org.elasticsearch.packaging.test;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.client.fluent.Request;
import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.DockerRun;
import org.elasticsearch.packaging.util.Installation;
import org.elasticsearch.packaging.util.Platforms;
import org.elasticsearch.packaging.util.ProcessInfo;
@ -34,13 +34,14 @@ import java.util.Set;
import java.util.stream.Collectors;
import static java.nio.file.attribute.PosixFilePermissions.fromString;
import static org.elasticsearch.packaging.util.Distribution.Packaging;
import static org.elasticsearch.packaging.util.Docker.assertPermissionsAndOwnership;
import static org.elasticsearch.packaging.util.Docker.chownWithPrivilegeEscalation;
import static org.elasticsearch.packaging.util.Docker.copyFromContainer;
import static org.elasticsearch.packaging.util.Docker.existsInContainer;
import static org.elasticsearch.packaging.util.Docker.getContainerLogs;
import static org.elasticsearch.packaging.util.Docker.getImageHealthcheck;
import static org.elasticsearch.packaging.util.Docker.getImageLabels;
import static org.elasticsearch.packaging.util.Docker.getImageName;
import static org.elasticsearch.packaging.util.Docker.getJson;
import static org.elasticsearch.packaging.util.Docker.mkDirWithPrivilegeEscalation;
import static org.elasticsearch.packaging.util.Docker.removeContainer;
@ -57,6 +58,7 @@ import static org.elasticsearch.packaging.util.FileMatcher.p775;
import static org.elasticsearch.packaging.util.FileUtils.append;
import static org.elasticsearch.packaging.util.FileUtils.rm;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyString;
import static org.hamcrest.Matchers.equalTo;
@ -545,7 +547,7 @@ public class DockerTests extends PackagingTestCase {
// expected group.
final Shell localSh = new Shell();
final String findResults = localSh.run(
"docker run --rm --tty " + getImageName(distribution) + " bash -c ' touch data/test && find . \\! -group 0 ' "
"docker run --rm --tty " + DockerRun.getImageName(distribution) + " bash -c ' touch data/test && find . \\! -group 0 ' "
).stdout;
assertThat("Found some files whose GID != 0", findResults, is(emptyString()));
@ -556,6 +558,8 @@ public class DockerTests extends PackagingTestCase {
* @see <a href="http://label-schema.org/">Label Schema website</a>
*/
public void test110OrgLabelSchemaLabels() throws Exception {
assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK);
final Map<String, String> labels = getImageLabels(distribution);
final Map<String, String> staticLabels = new HashMap<>();
@ -589,6 +593,8 @@ public class DockerTests extends PackagingTestCase {
* @see <a href="https://github.com/opencontainers/image-spec/blob/master/annotations.md">Open Containers Annotations</a>
*/
public void test110OrgOpencontainersLabels() throws Exception {
assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK);
final Map<String, String> labels = getImageLabels(distribution);
final Map<String, String> staticLabels = new HashMap<>();
@ -755,11 +761,24 @@ public class DockerTests extends PackagingTestCase {
assertThat(xArgs, hasItems("-Xms376m", "-Xmx376m"));
}
/**
* Checks that the image has an appropriate <code>HEALTHCHECK</code> definition for the current distribution.
*/
public void test160CheckImageHealthcheckDefinition() throws Exception {
final List<String> imageHealthcheck = getImageHealthcheck(distribution);
if (distribution.packaging == Packaging.DOCKER_IRON_BANK) {
assertThat(imageHealthcheck, contains("CMD-SHELL", "curl -I -f --max-time 5 http://localhost:9200 || exit 1"));
} else {
assertThat(imageHealthcheck, nullValue());
}
}
/**
* Check that the UBI images has the correct license information in the correct place.
*/
public void test200UbiImagesHaveLicenseDirectory() {
assumeTrue(distribution.packaging == Distribution.Packaging.DOCKER_UBI);
assumeTrue(distribution.packaging == Packaging.DOCKER_UBI);
final String[] files = sh.run("find /licenses -type f").stdout.split("\n");
assertThat(files, arrayContaining("/licenses/LICENSE"));
@ -774,7 +793,7 @@ public class DockerTests extends PackagingTestCase {
* Check that the UBI image has the expected labels
*/
public void test210UbiLabels() throws Exception {
assumeTrue(distribution.packaging == Distribution.Packaging.DOCKER_UBI);
assumeTrue(distribution.packaging == Packaging.DOCKER_UBI);
final Map<String, String> labels = getImageLabels(distribution);
@ -794,4 +813,36 @@ public class DockerTests extends PackagingTestCase {
dynamicLabels.forEach(key -> assertThat(labels, hasKey(key)));
}
/**
 * Check that the Iron Bank image has the correct license information in the correct place.
 */
public void test300IronBankImagesHaveLicenseDirectory() {
    assumeTrue(distribution.packaging == Packaging.DOCKER_IRON_BANK);

    // Both the base license and the addendum must be present under /licenses
    final String[] licenseFiles = sh.run("find /licenses -type f").stdout.split("\n");
    assertThat(licenseFiles, arrayContaining("/licenses/LICENSE", "/licenses/LICENSE.addendum"));

    // The image has no `diff` binary, so fetch both files and compare them here instead
    final String distributionLicense = sh.run("cat /usr/share/elasticsearch/LICENSE.txt").stdout;
    final String imageLicense = sh.run("cat /licenses/LICENSE").stdout;
    assertThat(imageLicense, equalTo(distributionLicense));
}
/**
 * Check that the Iron Bank image doesn't define extra labels
 */
public void test310IronBankImageHasNoAdditionalLabels() throws Exception {
    assumeTrue(distribution.packaging == Packaging.DOCKER_IRON_BANK);

    final Map<String, String> labels = getImageLabels(distribution);

    // We can't just assert that the labels map is empty, because it can inherit labels from its base.
    // This is certainly the case when we build the Iron Bank image using a UBI base. It is unknown
    // if that is true for genuine Iron Bank builds.
    for (String labelKey : labels.keySet()) {
        assertFalse(labelKey.startsWith("org.label-schema."));
        assertFalse(labelKey.startsWith("org.opencontainers."));
    }
}
}

View file

@ -444,6 +444,7 @@ public class KeystoreManagementTests extends PackagingTestCase {
break;
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
assertPermissionsAndOwnership(keystore, p660);
break;
default:

View file

@ -215,6 +215,7 @@ public abstract class PackagingTestCase extends Assert {
break;
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
installation = Docker.runContainer(distribution);
Docker.verifyContainerInstallation(installation, distribution);
break;
@ -296,6 +297,7 @@ public abstract class PackagingTestCase extends Assert {
return Packages.runElasticsearchStartCommand(sh);
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
// nothing, "installing" docker image is running it
return Shell.NO_OP;
default:
@ -315,6 +317,7 @@ public abstract class PackagingTestCase extends Assert {
break;
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
// nothing, "installing" docker image is running it
break;
default:
@ -335,6 +338,7 @@ public abstract class PackagingTestCase extends Assert {
break;
case DOCKER:
case DOCKER_UBI:
case DOCKER_IRON_BANK:
Docker.waitForElasticsearchToStart();
break;
default:

View file

@ -29,6 +29,8 @@ public class Distribution {
this.packaging = Packaging.DOCKER;
} else if (filename.endsWith(".ubi.tar")) {
this.packaging = Packaging.DOCKER_UBI;
} else if (filename.endsWith(".ironbank.tar")) {
this.packaging = Packaging.DOCKER_IRON_BANK;
} else {
int lastDot = filename.lastIndexOf('.');
this.packaging = Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT));
@ -51,8 +53,17 @@ public class Distribution {
return packaging == Packaging.RPM || packaging == Packaging.DEB;
}
/**
 * @return whether this distribution is packaged as a Docker image.
 */
public boolean isDocker() {
    // All three Docker flavours (default, UBI, Iron Bank) count as Docker packaging
    return packaging == Packaging.DOCKER || packaging == Packaging.DOCKER_UBI || packaging == Packaging.DOCKER_IRON_BANK;
}
public enum Packaging {
@ -62,7 +73,8 @@ public class Distribution {
DEB(".deb", Platforms.isDPKG()),
RPM(".rpm", Platforms.isRPM()),
DOCKER(".docker.tar", Platforms.isDocker()),
DOCKER_UBI(".ubi.tar", Platforms.isDocker());
DOCKER_UBI(".ubi.tar", Platforms.isDocker()),
DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker());
/** The extension of this distribution's file */
public final String extension;

View file

@ -28,6 +28,7 @@ import java.util.Set;
import java.util.stream.Stream;
import static java.nio.file.attribute.PosixFilePermissions.fromString;
import static org.elasticsearch.packaging.util.DockerRun.getImageName;
import static org.elasticsearch.packaging.util.FileMatcher.p644;
import static org.elasticsearch.packaging.util.FileMatcher.p664;
import static org.elasticsearch.packaging.util.FileMatcher.p755;
@ -572,13 +573,7 @@ public class Docker {
* @return a mapping from label name to value
*/
public static Map<String, String> getImageLabels(Distribution distribution) throws Exception {
// The format below extracts the .Config.Labels value, and prints it as json. Without the json
// modifier, a stringified Go map is printed instead, which isn't helpful.
String labelsJson = sh.run("docker inspect -f '{{json .Config.Labels}}' " + getImageName(distribution)).stdout;
ObjectMapper mapper = new ObjectMapper();
final JsonNode jsonNode = mapper.readTree(labelsJson);
final JsonNode jsonNode = getImageInspectionJson(distribution).at("/Config/Labels");
Map<String, String> labels = new HashMap<>();
@ -587,11 +582,34 @@ public class Docker {
return labels;
}
/**
 * Fetches the <code>HEALTHCHECK</code> command for a Docker image
 * @param distribution required to derive the image name
 * @return a list of values from `docker inspect`, or null if there is no healthcheck defined
 */
public static List<String> getImageHealthcheck(Distribution distribution) throws Exception {
    final JsonNode healthcheckNode = getImageInspectionJson(distribution).at("/Config/Healthcheck/Test");

    // A missing node means the image's Dockerfile never declared a HEALTHCHECK
    if (healthcheckNode.isMissingNode()) {
        return null;
    }

    final List<String> commandParts = new ArrayList<>(healthcheckNode.size());
    healthcheckNode.forEach(part -> commandParts.add(part.textValue()));
    return commandParts;
}
/**
 * Runs <code>docker inspect</code> against the image for the supplied distribution and
 * parses the output into a JSON tree.
 * @param distribution required to derive the image name
 * @return the first element of the inspection output
 */
private static JsonNode getImageInspectionJson(Distribution distribution) throws Exception {
    // `docker inspect` prints a JSON array with one entry per inspected image
    final String inspectOutput = sh.run("docker inspect " + getImageName(distribution)).stdout;
    return new ObjectMapper().readTree(inspectOutput).get(0);
}
/**
 * Fetches the logs of the container under test.
 * @return the result of running <code>docker logs</code>
 */
public static Shell.Result getContainerLogs() {
    final String command = "docker logs " + containerId;
    return sh.run(command);
}
public static String getImageName(Distribution distribution) {
return "elasticsearch" + (distribution.packaging == Distribution.Packaging.DOCKER_UBI ? "-ubi8" : "") + ":test";
}
}

View file

@ -129,7 +129,31 @@ public class DockerRun {
return String.join(" ", cmd);
}
static String getImageName(Distribution distribution) {
return "elasticsearch" + (distribution.packaging == Distribution.Packaging.DOCKER_UBI ? "-ubi8" : "") + ":test";
/**
 * Derives a Docker image name from the supplied distribution.
 * @param distribution the distribution to use
 * @return an image name
 */
public static String getImageName(Distribution distribution) {
    final String suffix;

    // Each Docker flavour publishes under its own image-name suffix
    if (distribution.packaging == Distribution.Packaging.DOCKER) {
        suffix = "";
    } else if (distribution.packaging == Distribution.Packaging.DOCKER_UBI) {
        suffix = "-ubi8";
    } else if (distribution.packaging == Distribution.Packaging.DOCKER_IRON_BANK) {
        suffix = "-ironbank";
    } else {
        throw new IllegalStateException("Unexpected distribution packaging type: " + distribution.packaging);
    }

    return "elasticsearch" + suffix + ":test";
}
}

View file

@ -28,14 +28,12 @@ List projects = [
'distribution:archives:linux-tar',
'distribution:archives:no-jdk-linux-tar',
'distribution:docker',
'distribution:docker:docker-aarch64-build-context',
'distribution:docker:docker-aarch64-export',
'distribution:docker:docker-build-context',
'distribution:docker:docker-export',
'distribution:docker:ironbank-docker-build-context',
'distribution:docker:ironbank-aarch64-docker-export',
'distribution:docker:ironbank-docker-export',
'distribution:docker:transform-log4j-config',
'distribution:docker:ubi-docker-aarch64-export',
'distribution:docker:ubi-docker-build-context',
'distribution:docker:ubi-docker-export',
'distribution:packages:aarch64-deb',
'distribution:packages:deb',