mirror of
https://github.com/elastic/kibana.git
synced 2025-04-25 02:09:32 -04:00
[QA][Code Coverage] Modularize Code Coverage (#133759)
Kinda sneaky...since we already mutate the jest portion of the file system (target/kibana-coverage/jest) by dumping "jest unit" & "jest integration" coverage into the same "final" directory...go ahead and make "jest integration" use the same ran file designator as "jest unit". This saves me from having to add logic for this later on.
This commit is contained in:
parent
6e0086df00
commit
d92440e631
8 changed files with 147 additions and 88 deletions
|
@ -116,7 +116,7 @@ printf "%s\n" "${results[@]}"
|
||||||
echo ""
|
echo ""
|
||||||
|
|
||||||
# So the last step "knows" this config ran
|
# So the last step "knows" this config ran
|
||||||
uploadRanFile "ftr_configs"
|
uploadRanFile "functional"
|
||||||
|
|
||||||
# Force exit 0 to ensure the next build step starts.
|
# Force exit 0 to ensure the next build step starts.
|
||||||
exit 0
|
exit 0
|
||||||
|
|
|
@ -8,59 +8,103 @@ source .buildkite/scripts/steps/code_coverage/merge.sh
|
||||||
|
|
||||||
export CODE_COVERAGE=1
|
export CODE_COVERAGE=1
|
||||||
echo "--- Reading Kibana stats cluster creds from vault"
|
echo "--- Reading Kibana stats cluster creds from vault"
|
||||||
export USER_FROM_VAULT="$(retry 5 5 vault read -field=username secret/kibana-issues/prod/coverage/elasticsearch)"
|
USER_FROM_VAULT="$(retry 5 5 vault read -field=username secret/kibana-issues/prod/coverage/elasticsearch)"
|
||||||
export PASS_FROM_VAULT="$(retry 5 5 vault read -field=password secret/kibana-issues/prod/coverage/elasticsearch)"
|
export USER_FROM_VAULT
|
||||||
export HOST_FROM_VAULT="$(retry 5 5 vault read -field=host secret/kibana-issues/prod/coverage/elasticsearch)"
|
PASS_FROM_VAULT="$(retry 5 5 vault read -field=password secret/kibana-issues/prod/coverage/elasticsearch)"
|
||||||
export TIME_STAMP=$(date +"%Y-%m-%dT%H:%M:00Z")
|
export PASS_FROM_VAULT
|
||||||
|
HOST_FROM_VAULT="$(retry 5 5 vault read -field=host secret/kibana-issues/prod/coverage/elasticsearch)"
|
||||||
echo "--- Print KIBANA_DIR"
|
export HOST_FROM_VAULT
|
||||||
echo "### KIBANA_DIR: $KIBANA_DIR"
|
TIME_STAMP=$(date +"%Y-%m-%dT%H:%M:00Z")
|
||||||
|
export TIME_STAMP
|
||||||
|
|
||||||
echo "--- Download previous git sha"
|
echo "--- Download previous git sha"
|
||||||
.buildkite/scripts/steps/code_coverage/reporting/downloadPrevSha.sh
|
.buildkite/scripts/steps/code_coverage/reporting/downloadPrevSha.sh
|
||||||
previousSha=$(cat downloaded_previous.txt)
|
PREVIOUS_SHA=$(cat downloaded_previous.txt)
|
||||||
|
|
||||||
echo "--- Upload new git sha"
|
echo "--- Upload new git sha"
|
||||||
.buildkite/scripts/steps/code_coverage/reporting/uploadPrevSha.sh
|
.buildkite/scripts/steps/code_coverage/reporting/uploadPrevSha.sh
|
||||||
|
|
||||||
.buildkite/scripts/bootstrap.sh
|
.buildkite/scripts/bootstrap.sh
|
||||||
|
|
||||||
echo "--- Download coverage artifacts"
|
collectRan() {
|
||||||
buildkite-agent artifact download target/kibana-coverage/jest/* .
|
buildkite-agent artifact download target/ran_files/* .
|
||||||
#buildkite-agent artifact download target/kibana-coverage/functional/* .
|
|
||||||
buildkite-agent artifact download target/ran_files/* .
|
|
||||||
ls -l target/ran_files/* || echo "### No ran-files found"
|
|
||||||
|
|
||||||
echo "--- process HTML Links"
|
while read -r x; do
|
||||||
.buildkite/scripts/steps/code_coverage/reporting/prokLinks.sh
|
ran=("${ran[@]}" "$(cat "$x")")
|
||||||
|
done <<<"$(find target/ran_files -maxdepth 1 -type f -name '*.txt')"
|
||||||
|
|
||||||
echo "--- collect VCS Info"
|
echo "--- Collected Ran files: ${ran[*]}"
|
||||||
.buildkite/scripts/steps/code_coverage/reporting/collectVcsInfo.sh
|
}
|
||||||
|
|
||||||
echo "--- Jest: Reset file paths prefix, merge coverage files, and generate the final combined report"
|
uniqueifyRanConfigs() {
|
||||||
# Jest: Reset file paths prefix to Kibana Dir of final worker
|
local xs=("$@")
|
||||||
replacePaths "$KIBANA_DIR/target/kibana-coverage/jest" "CC_REPLACEMENT_ANCHOR" "$KIBANA_DIR"
|
local xss
|
||||||
yarn nyc report --nycrc-path src/dev/code_coverage/nyc_config/nyc.jest.config.js
|
xss=$(printf "%s\n" "${xs[@]}" | sort -u | tr '\n' ' ' | xargs) # xargs trims whitespace
|
||||||
|
uniqRanConfigs=("$xss")
|
||||||
|
echo "--- Uniq Ran files: ${uniqRanConfigs[*]}"
|
||||||
|
}
|
||||||
|
|
||||||
#echo "--- Functional: Reset file paths prefix, merge coverage files, and generate the final combined report"
|
fetchArtifacts() {
|
||||||
# Functional: Reset file paths prefix to Kibana Dir of final worker
|
echo "--- Fetch coverage artifacts"
|
||||||
#set +e
|
|
||||||
#sed -ie "s|CC_REPLACEMENT_ANCHOR|${KIBANA_DIR}|g" target/kibana-coverage/functional/*.json
|
|
||||||
#echo "--- Begin Split and Merge for Functional"
|
|
||||||
#splitCoverage target/kibana-coverage/functional
|
|
||||||
#splitMerge
|
|
||||||
#set -e
|
|
||||||
|
|
||||||
echo "--- Archive and upload combined reports"
|
local xs=("$@")
|
||||||
collectAndUpload target/kibana-coverage/jest/kibana-jest-coverage.tar.gz \
|
for x in "${xs[@]}"; do
|
||||||
target/kibana-coverage/jest-combined
|
buildkite-agent artifact download "target/kibana-coverage/${x}/*" .
|
||||||
#collectAndUpload target/kibana-coverage/functional/kibana-functional-coverage.tar.gz \
|
done
|
||||||
# target/kibana-coverage/functional-combined
|
}
|
||||||
|
|
||||||
echo "--- Upload coverage static site"
|
archiveReports() {
|
||||||
.buildkite/scripts/steps/code_coverage/reporting/uploadStaticSite.sh
|
echo "--- Archive and upload combined reports"
|
||||||
|
|
||||||
echo "--- Ingest results to Kibana stats cluster"
|
local xs=("$@")
|
||||||
.buildkite/scripts/steps/code_coverage/reporting/ingestData.sh 'elastic+kibana+code-coverage' \
|
for x in "${xs[@]}"; do
|
||||||
${BUILDKITE_BUILD_NUMBER} ${BUILDKITE_BUILD_URL} ${previousSha} \
|
echo "### Collect and Upload for: ${x}"
|
||||||
|
# fileHeads "target/file-heads-archive-reports-for-${x}.txt" "target/kibana-coverage/${x}"
|
||||||
|
# dirListing "target/dir-listing-${x}-combined-during-archiveReports.txt" target/kibana-coverage/${x}-combined
|
||||||
|
# dirListing "target/dir-listing-${x}-during-archiveReports.txt" target/kibana-coverage/${x}
|
||||||
|
collectAndUpload "target/kibana-coverage/${x}/kibana-${x}-coverage.tar.gz" "target/kibana-coverage/${x}-combined"
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
mergeAll() {
|
||||||
|
local xs=("$@")
|
||||||
|
|
||||||
|
for x in "${xs[@]}"; do
|
||||||
|
if [ "$x" == "jest" ]; then
|
||||||
|
echo "--- [$x]: Reset file paths prefix, merge coverage files, and generate the final combined report"
|
||||||
|
replacePaths "$KIBANA_DIR/target/kibana-coverage/jest" "CC_REPLACEMENT_ANCHOR" "$KIBANA_DIR"
|
||||||
|
yarn nyc report --nycrc-path src/dev/code_coverage/nyc_config/nyc.jest.config.js
|
||||||
|
elif [ "$x" == "functional" ]; then
|
||||||
|
echo "---[$x] : Reset file paths prefix, merge coverage files, and generate the final combined report"
|
||||||
|
set +e
|
||||||
|
sed -ie "s|CC_REPLACEMENT_ANCHOR|${KIBANA_DIR}|g" target/kibana-coverage/functional/*.json
|
||||||
|
echo "--- Begin Split and Merge for Functional"
|
||||||
|
splitCoverage target/kibana-coverage/functional
|
||||||
|
splitMerge
|
||||||
|
set -e
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
modularize() {
|
||||||
|
collectRan
|
||||||
|
if [ -d target/ran_files ]; then
|
||||||
|
uniqueifyRanConfigs "${ran[@]}"
|
||||||
|
fetchArtifacts "${uniqRanConfigs[@]}"
|
||||||
|
mergeAll "${uniqRanConfigs[@]}"
|
||||||
|
archiveReports "${uniqRanConfigs[@]}"
|
||||||
|
.buildkite/scripts/steps/code_coverage/reporting/prokLinks.sh "${uniqRanConfigs[@]}"
|
||||||
|
.buildkite/scripts/steps/code_coverage/reporting/uploadStaticSite.sh "${uniqRanConfigs[@]}"
|
||||||
|
.buildkite/scripts/steps/code_coverage/reporting/collectVcsInfo.sh
|
||||||
|
source .buildkite/scripts/steps/code_coverage/reporting/ingestData.sh 'elastic+kibana+code-coverage' \
|
||||||
|
"${BUILDKITE_BUILD_NUMBER}" "${BUILDKITE_BUILD_URL}" "${PREVIOUS_SHA}" \
|
||||||
'src/dev/code_coverage/ingest_coverage/team_assignment/team_assignments.txt'
|
'src/dev/code_coverage/ingest_coverage/team_assignment/team_assignments.txt'
|
||||||
|
ingestModular "${uniqRanConfigs[@]}"
|
||||||
|
else
|
||||||
|
echo "--- Found zero configs that ran, cancelling ingestion."
|
||||||
|
exit 11
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
modularize
|
||||||
|
echo "### unique ran configs: ${uniqRanConfigs[*]}"
|
||||||
|
|
|
@ -15,4 +15,4 @@ echo '--- Jest Integration code coverage'
|
||||||
.buildkite/scripts/steps/code_coverage/jest_parallel.sh jest.integration.config.js
|
.buildkite/scripts/steps/code_coverage/jest_parallel.sh jest.integration.config.js
|
||||||
|
|
||||||
# So the last step "knows" this config ran
|
# So the last step "knows" this config ran
|
||||||
uploadRanFile "jest_integration"
|
uploadRanFile "jest"
|
||||||
|
|
|
@ -2,6 +2,8 @@
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
echo "--- collect VCS Info"
|
||||||
|
|
||||||
echo "### Prok'd Index File: ..."
|
echo "### Prok'd Index File: ..."
|
||||||
cat src/dev/code_coverage/www/index.html
|
cat src/dev/code_coverage/www/index.html
|
||||||
|
|
||||||
|
|
|
@ -2,9 +2,6 @@
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
echo "### Ingesting Code Coverage"
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
COVERAGE_JOB_NAME=$1
|
COVERAGE_JOB_NAME=$1
|
||||||
export COVERAGE_JOB_NAME
|
export COVERAGE_JOB_NAME
|
||||||
echo "### debug COVERAGE_JOB_NAME: ${COVERAGE_JOB_NAME}"
|
echo "### debug COVERAGE_JOB_NAME: ${COVERAGE_JOB_NAME}"
|
||||||
|
@ -31,27 +28,25 @@ echo "### debug TEAM_ASSIGN_PATH: ${TEAM_ASSIGN_PATH}"
|
||||||
|
|
||||||
BUFFER_SIZE=500
|
BUFFER_SIZE=500
|
||||||
export BUFFER_SIZE
|
export BUFFER_SIZE
|
||||||
echo "### debug BUFFER_SIZE: ${BUFFER_SIZE}"
|
|
||||||
|
|
||||||
# Build team assignments file
|
ingestModular() {
|
||||||
echo "### Generate Team Assignments"
|
local xs=("$@")
|
||||||
CI_STATS_DISABLED=true node scripts/generate_team_assignments.js \
|
|
||||||
--verbose --src '.github/CODEOWNERS' --dest $TEAM_ASSIGN_PATH
|
|
||||||
|
|
||||||
#for x in functional jest; do
|
echo "--- Generate Team Assignments"
|
||||||
# echo "### Ingesting coverage for ${x}"
|
CI_STATS_DISABLED=true node scripts/generate_team_assignments.js \
|
||||||
# COVERAGE_SUMMARY_FILE="target/kibana-coverage/${x}-combined/coverage-summary.json"
|
--verbose --src '.github/CODEOWNERS' --dest "$TEAM_ASSIGN_PATH"
|
||||||
#
|
|
||||||
# CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path ${COVERAGE_SUMMARY_FILE} \
|
|
||||||
# --vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath $TEAM_ASSIGN_PATH &
|
|
||||||
#done
|
|
||||||
#wait
|
|
||||||
|
|
||||||
echo "### Ingesting coverage for JEST"
|
echo "--- Ingest results to Kibana stats cluster"
|
||||||
COVERAGE_SUMMARY_FILE="target/kibana-coverage/jest-combined/coverage-summary.json"
|
for x in "${xs[@]}"; do
|
||||||
|
echo "--- Ingesting coverage for ${x}"
|
||||||
|
|
||||||
CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path ${COVERAGE_SUMMARY_FILE} \
|
COVERAGE_SUMMARY_FILE="target/kibana-coverage/${x}-combined/coverage-summary.json"
|
||||||
--vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath $TEAM_ASSIGN_PATH
|
|
||||||
|
|
||||||
echo "--- Ingesting Code Coverage - Complete"
|
CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path "${COVERAGE_SUMMARY_FILE}" \
|
||||||
echo ""
|
--vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath "$TEAM_ASSIGN_PATH" &
|
||||||
|
done
|
||||||
|
wait
|
||||||
|
|
||||||
|
echo "--- Ingesting Code Coverage - Complete"
|
||||||
|
echo ""
|
||||||
|
}
|
||||||
|
|
|
@ -2,8 +2,20 @@
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
cat << EOF > src/dev/code_coverage/www/index_partial_2.html
|
echo "--- process HTML Links"
|
||||||
<a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html">Latest Jest</a>
|
|
||||||
|
xs=("$@")
|
||||||
|
len=${#xs[@]}
|
||||||
|
|
||||||
|
# TODO-TRE: Maybe use more exhaustive logic instead of just length.
|
||||||
|
if [[ $len -eq 2 ]]; then
|
||||||
|
links="<a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html">Latest Jest</a><a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/functional-combined/index.html">Latest FTR</a>"
|
||||||
|
else
|
||||||
|
links="<a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html">Latest Jest</a>"
|
||||||
|
fi
|
||||||
|
|
||||||
|
cat <<EOF >src/dev/code_coverage/www/index_partial_2.html
|
||||||
|
${links}
|
||||||
</nav>
|
</nav>
|
||||||
</div>
|
</div>
|
||||||
</header>
|
</header>
|
||||||
|
|
|
@ -2,19 +2,22 @@
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
xs=("$@")
|
||||||
|
|
||||||
uploadPrefix="gs://elastic-bekitzur-kibana-coverage-live/"
|
uploadPrefix="gs://elastic-bekitzur-kibana-coverage-live/"
|
||||||
uploadPrefixWithTimeStamp="${uploadPrefix}${TIME_STAMP}/"
|
uploadPrefixWithTimeStamp="${uploadPrefix}${TIME_STAMP}/"
|
||||||
|
|
||||||
cat src/dev/code_coverage/www/index.html
|
uploadBase() {
|
||||||
|
for x in 'src/dev/code_coverage/www/index.html' 'src/dev/code_coverage/www/404.html'; do
|
||||||
|
gsutil -m -q cp -r -a public-read -z js,css,html "${x}" "${uploadPrefix}"
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
for x in 'src/dev/code_coverage/www/index.html' 'src/dev/code_coverage/www/404.html'; do
|
uploadRest() {
|
||||||
gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefix}
|
for x in "${xs[@]}"; do
|
||||||
done
|
gsutil -m -q cp -r -a public-read -z js,css,html "target/kibana-coverage/${x}-combined" "${uploadPrefixWithTimeStamp}"
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
#gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefixWithTimeStamp}
|
uploadBase
|
||||||
#
|
uploadRest
|
||||||
#for x in 'target/kibana-coverage/functional-combined' 'target/kibana-coverage/jest-combined'; do
|
|
||||||
# gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefixWithTimeStamp}
|
|
||||||
#done
|
|
||||||
|
|
||||||
gsutil -m -q cp -r -a public-read -z js,css,html 'target/kibana-coverage/jest-combined' ${uploadPrefixWithTimeStamp}
|
|
||||||
|
|
|
@ -2,15 +2,27 @@
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
header() {
|
||||||
|
local fileName=$1
|
||||||
|
|
||||||
|
echo "" >"$fileName"
|
||||||
|
|
||||||
|
echo "### File Name:" >>"$fileName"
|
||||||
|
printf " %s\n\n" "$fileName" >>"$fileName"
|
||||||
|
}
|
||||||
|
|
||||||
# $1 file name, ex: "target/dir-listing-jest.txt"
|
# $1 file name, ex: "target/dir-listing-jest.txt"
|
||||||
# $2 directory to be listed, ex: target/kibana-coverage/jest
|
# $2 directory to be listed, ex: target/kibana-coverage/jest
|
||||||
dirListing() {
|
dirListing() {
|
||||||
local fileName=$1
|
local fileName=$1
|
||||||
local dir=$2
|
local dir=$2
|
||||||
|
|
||||||
ls -l "$dir" >"$fileName"
|
header "$fileName"
|
||||||
|
|
||||||
|
ls -l "$dir" >>"$fileName"
|
||||||
|
|
||||||
printf "\n### %s \n\tlisted to: %s\n" "$dir" "$fileName"
|
printf "\n### %s \n\tlisted to: %s\n" "$dir" "$fileName"
|
||||||
|
|
||||||
buildkite-agent artifact upload "$fileName"
|
buildkite-agent artifact upload "$fileName"
|
||||||
|
|
||||||
printf "\n### %s Uploaded\n" "$fileName"
|
printf "\n### %s Uploaded\n" "$fileName"
|
||||||
|
@ -29,15 +41,6 @@ replacePaths() {
|
||||||
done
|
done
|
||||||
}
|
}
|
||||||
|
|
||||||
header() {
|
|
||||||
local fileName=$1
|
|
||||||
|
|
||||||
echo "" >"$fileName"
|
|
||||||
|
|
||||||
echo "### File Name:" >>"$fileName"
|
|
||||||
printf "\t%s\n" "$fileName" >>"$fileName"
|
|
||||||
}
|
|
||||||
|
|
||||||
fileHeads() {
|
fileHeads() {
|
||||||
local fileName=$1
|
local fileName=$1
|
||||||
local dir=$2
|
local dir=$2
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue