Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 17:59:23 -04:00)
[QA][Code Coverage] Modularize Code Coverage (#133759)
Kinda sneaky... since we already mutate the jest portion of the file system (target/kibana-coverage/jest) by dumping "jest unit" and "jest integration" coverage into the same "final" directory, go ahead and make "jest integration" use the same ran-file designator as "jest unit". This saves having to add logic for it later on.
parent 6e0086df00
commit d92440e631

8 changed files with 147 additions and 88 deletions
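For context: the "ran file" in the commit message is a small marker each coverage step uploads so the final merge step can discover which configs actually ran. The helper is defined elsewhere in the repo's .buildkite utilities; a minimal sketch of its assumed shape (body hypothetical, not taken from this diff):

uploadRanFile() {
  # $1 is the designator later read back by collectRan, e.g. "jest" or "functional"
  local ranFile="$1"
  mkdir -p target/ran_files
  echo "$ranFile" >"target/ran_files/${ranFile}.txt"
  buildkite-agent artifact upload "target/ran_files/${ranFile}.txt"
}

With "jest unit" and "jest integration" both reporting the "jest" designator, the merge step sees a single jest config (uniqueifyRanConfigs below collapses duplicates via sort -u).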
@@ -116,7 +116,7 @@ printf "%s\n" "${results[@]}"
 echo ""
 
 # So the last step "knows" this config ran
-uploadRanFile "ftr_configs"
+uploadRanFile "functional"
 
 # Force exit 0 to ensure the next build step starts.
 exit 0
@@ -8,59 +8,103 @@ source .buildkite/scripts/steps/code_coverage/merge.sh
 
 export CODE_COVERAGE=1
 echo "--- Reading Kibana stats cluster creds from vault"
-export USER_FROM_VAULT="$(retry 5 5 vault read -field=username secret/kibana-issues/prod/coverage/elasticsearch)"
-export PASS_FROM_VAULT="$(retry 5 5 vault read -field=password secret/kibana-issues/prod/coverage/elasticsearch)"
-export HOST_FROM_VAULT="$(retry 5 5 vault read -field=host secret/kibana-issues/prod/coverage/elasticsearch)"
-export TIME_STAMP=$(date +"%Y-%m-%dT%H:%M:00Z")
-
-echo "--- Print KIBANA_DIR"
-echo "### KIBANA_DIR: $KIBANA_DIR"
+USER_FROM_VAULT="$(retry 5 5 vault read -field=username secret/kibana-issues/prod/coverage/elasticsearch)"
+export USER_FROM_VAULT
+PASS_FROM_VAULT="$(retry 5 5 vault read -field=password secret/kibana-issues/prod/coverage/elasticsearch)"
+export PASS_FROM_VAULT
+HOST_FROM_VAULT="$(retry 5 5 vault read -field=host secret/kibana-issues/prod/coverage/elasticsearch)"
+export HOST_FROM_VAULT
+TIME_STAMP=$(date +"%Y-%m-%dT%H:%M:00Z")
+export TIME_STAMP
 
 echo "--- Download previous git sha"
 .buildkite/scripts/steps/code_coverage/reporting/downloadPrevSha.sh
-previousSha=$(cat downloaded_previous.txt)
+PREVIOUS_SHA=$(cat downloaded_previous.txt)
 
 echo "--- Upload new git sha"
 .buildkite/scripts/steps/code_coverage/reporting/uploadPrevSha.sh
 
 .buildkite/scripts/bootstrap.sh
 
-echo "--- Download coverage artifacts"
-buildkite-agent artifact download target/kibana-coverage/jest/* .
-#buildkite-agent artifact download target/kibana-coverage/functional/* .
+collectRan() {
+  buildkite-agent artifact download target/ran_files/* .
+  ls -l target/ran_files/* || echo "### No ran-files found"
 
-echo "--- process HTML Links"
-.buildkite/scripts/steps/code_coverage/reporting/prokLinks.sh
+  while read -r x; do
+    ran=("${ran[@]}" "$(cat "$x")")
+  done <<<"$(find target/ran_files -maxdepth 1 -type f -name '*.txt')"
 
-echo "--- collect VCS Info"
-.buildkite/scripts/steps/code_coverage/reporting/collectVcsInfo.sh
+  echo "--- Collected Ran files: ${ran[*]}"
+}
 
-echo "--- Jest: Reset file paths prefix, merge coverage files, and generate the final combined report"
-# Jest: Reset file paths prefix to Kibana Dir of final worker
+uniqueifyRanConfigs() {
+  local xs=("$@")
+  local xss
+  xss=$(printf "%s\n" "${xs[@]}" | sort -u | tr '\n' ' ' | xargs) # xargs trims whitespace
+  uniqRanConfigs=("$xss")
+  echo "--- Uniq Ran files: ${uniqRanConfigs[*]}"
+}
+
+fetchArtifacts() {
+  echo "--- Fetch coverage artifacts"
+
+  local xs=("$@")
+  for x in "${xs[@]}"; do
+    buildkite-agent artifact download "target/kibana-coverage/${x}/*" .
+  done
+}
+
+archiveReports() {
+  echo "--- Archive and upload combined reports"
+
+  local xs=("$@")
+  for x in "${xs[@]}"; do
+    echo "### Collect and Upload for: ${x}"
+    # fileHeads "target/file-heads-archive-reports-for-${x}.txt" "target/kibana-coverage/${x}"
+    # dirListing "target/dir-listing-${x}-combined-during-archiveReports.txt" target/kibana-coverage/${x}-combined
+    # dirListing "target/dir-listing-${x}-during-archiveReports.txt" target/kibana-coverage/${x}
+    collectAndUpload "target/kibana-coverage/${x}/kibana-${x}-coverage.tar.gz" "target/kibana-coverage/${x}-combined"
+  done
+}
+
+mergeAll() {
+  local xs=("$@")
+
+  for x in "${xs[@]}"; do
+    if [ "$x" == "jest" ]; then
+      echo "--- [$x]: Reset file paths prefix, merge coverage files, and generate the final combined report"
+      replacePaths "$KIBANA_DIR/target/kibana-coverage/jest" "CC_REPLACEMENT_ANCHOR" "$KIBANA_DIR"
+      yarn nyc report --nycrc-path src/dev/code_coverage/nyc_config/nyc.jest.config.js
+    elif [ "$x" == "functional" ]; then
+      echo "--- [$x]: Reset file paths prefix, merge coverage files, and generate the final combined report"
+      set +e
+      sed -ie "s|CC_REPLACEMENT_ANCHOR|${KIBANA_DIR}|g" target/kibana-coverage/functional/*.json
+      echo "--- Begin Split and Merge for Functional"
+      splitCoverage target/kibana-coverage/functional
+      splitMerge
+      set -e
+    fi
+  done
+}
 
-#echo "--- Functional: Reset file paths prefix, merge coverage files, and generate the final combined report"
-# Functional: Reset file paths prefix to Kibana Dir of final worker
-#set +e
-#sed -ie "s|CC_REPLACEMENT_ANCHOR|${KIBANA_DIR}|g" target/kibana-coverage/functional/*.json
-#echo "--- Begin Split and Merge for Functional"
-#splitCoverage target/kibana-coverage/functional
-#splitMerge
-#set -e
-
-echo "--- Archive and upload combined reports"
-collectAndUpload target/kibana-coverage/jest/kibana-jest-coverage.tar.gz \
-  target/kibana-coverage/jest-combined
-#collectAndUpload target/kibana-coverage/functional/kibana-functional-coverage.tar.gz \
-#  target/kibana-coverage/functional-combined
-
-echo "--- Upload coverage static site"
-.buildkite/scripts/steps/code_coverage/reporting/uploadStaticSite.sh
-
-echo "--- Ingest results to Kibana stats cluster"
-.buildkite/scripts/steps/code_coverage/reporting/ingestData.sh 'elastic+kibana+code-coverage' \
-  ${BUILDKITE_BUILD_NUMBER} ${BUILDKITE_BUILD_URL} ${previousSha} \
-  'src/dev/code_coverage/ingest_coverage/team_assignment/team_assignments.txt'
+modularize() {
+  collectRan
+  if [ -d target/ran_files ]; then
+    uniqueifyRanConfigs "${ran[@]}"
+    fetchArtifacts "${uniqRanConfigs[@]}"
+    mergeAll "${uniqRanConfigs[@]}"
+    archiveReports "${uniqRanConfigs[@]}"
+    .buildkite/scripts/steps/code_coverage/reporting/prokLinks.sh "${uniqRanConfigs[@]}"
+    .buildkite/scripts/steps/code_coverage/reporting/uploadStaticSite.sh "${uniqRanConfigs[@]}"
+    .buildkite/scripts/steps/code_coverage/reporting/collectVcsInfo.sh
+    source .buildkite/scripts/steps/code_coverage/reporting/ingestData.sh 'elastic+kibana+code-coverage' \
+      "${BUILDKITE_BUILD_NUMBER}" "${BUILDKITE_BUILD_URL}" "${PREVIOUS_SHA}" \
+      'src/dev/code_coverage/ingest_coverage/team_assignment/team_assignments.txt'
+    ingestModular "${uniqRanConfigs[@]}"
+  else
+    echo "--- Found zero configs that ran, cancelling ingestion."
+    exit 11
+  fi
+}
+
+modularize
+echo "### unique ran configs: ${uniqRanConfigs[*]}"
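One detail in the hunk above: each vault read was split into a plain assignment followed by a separate export. That follows ShellCheck's SC2155 advice: in 'export VAR="$(cmd)"' the exit status of cmd is masked by export's own (successful) status, so "set -e" cannot catch a failed read; a bare assignment keeps the substitution's status. A self-contained repro:

set -e
export FOO="$(false)"   # 'false' fails, but 'export' returns 0, so execution continues
echo "this still prints"

BAR="$(false)"          # a plain assignment keeps the failing exit status...
echo "never reached"    # ...so set -e aborts before this line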
@@ -15,4 +15,4 @@ echo '--- Jest Integration code coverage'
 .buildkite/scripts/steps/code_coverage/jest_parallel.sh jest.integration.config.js
 
 # So the last step "knows" this config ran
-uploadRanFile "jest_integration"
+uploadRanFile "jest"
@@ -2,6 +2,8 @@
 
+set -euo pipefail
+
 echo "--- collect VCS Info"
 
 echo "### Prok'd Index File: ..."
 cat src/dev/code_coverage/www/index.html
@@ -2,9 +2,6 @@
 
 set -euo pipefail
 
-echo "### Ingesting Code Coverage"
-echo ""
-
 COVERAGE_JOB_NAME=$1
 export COVERAGE_JOB_NAME
 echo "### debug COVERAGE_JOB_NAME: ${COVERAGE_JOB_NAME}"
@@ -31,27 +28,25 @@ echo "### debug TEAM_ASSIGN_PATH: ${TEAM_ASSIGN_PATH}"
 
 BUFFER_SIZE=500
 export BUFFER_SIZE
 echo "### debug BUFFER_SIZE: ${BUFFER_SIZE}"
 
-# Build team assignments file
-echo "### Generate Team Assignments"
+ingestModular() {
+  local xs=("$@")
+
+  echo "--- Generate Team Assignments"
 CI_STATS_DISABLED=true node scripts/generate_team_assignments.js \
-  --verbose --src '.github/CODEOWNERS' --dest $TEAM_ASSIGN_PATH
+  --verbose --src '.github/CODEOWNERS' --dest "$TEAM_ASSIGN_PATH"
 
-#for x in functional jest; do
-#  echo "### Ingesting coverage for ${x}"
-#  COVERAGE_SUMMARY_FILE="target/kibana-coverage/${x}-combined/coverage-summary.json"
-#
-#  CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path ${COVERAGE_SUMMARY_FILE} \
-#    --vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath $TEAM_ASSIGN_PATH &
-#done
-#wait
+  echo "--- Ingest results to Kibana stats cluster"
+  for x in "${xs[@]}"; do
+    echo "--- Ingesting coverage for ${x}"
 
-echo "### Ingesting coverage for JEST"
-COVERAGE_SUMMARY_FILE="target/kibana-coverage/jest-combined/coverage-summary.json"
+    COVERAGE_SUMMARY_FILE="target/kibana-coverage/${x}-combined/coverage-summary.json"
 
-CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path ${COVERAGE_SUMMARY_FILE} \
-  --vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath $TEAM_ASSIGN_PATH
+    CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path "${COVERAGE_SUMMARY_FILE}" \
+      --vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath "$TEAM_ASSIGN_PATH" &
+  done
+  wait
+
+  echo "--- Ingesting Code Coverage - Complete"
+  echo ""
+}
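Note how ingestModular fans each config's ingestion out as a background job and joins them with wait. Stripped to its shape, this is plain bash job control (the names below are illustrative, not from the repo):

ingest() { sleep 1; echo "ingested $1"; }   # stand-in for the node ingest_coverage call
for x in jest functional; do
  ingest "$x" &   # fan out: one background job per config
done
wait              # fan in: block until every job exits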
@@ -2,8 +2,20 @@
 
 set -euo pipefail
 
 echo "--- process HTML Links"
 
+xs=("$@")
+len=${#xs[@]}
+
+# TODO-TRE: Maybe use more exhaustive logic instead of just length.
+if [[ $len -eq 2 ]]; then
+  links="<a class=\"nav-link\" href=\"https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html\">Latest Jest</a><a class=\"nav-link\" href=\"https://kibana-coverage.elastic.dev/${TIME_STAMP}/functional-combined/index.html\">Latest FTR</a>"
+else
+  links="<a class=\"nav-link\" href=\"https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html\">Latest Jest</a>"
+fi
+
 cat <<EOF >src/dev/code_coverage/www/index_partial_2.html
-<a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html">Latest Jest</a>
+${links}
 </nav>
 </div>
 </header>
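prokLinks.sh now assembles the nav markup in a shell variable and splices it into the heredoc that writes index_partial_2.html. A freestanding sketch of the same technique (file path and values invented for illustration):

TIME_STAMP="2022-06-09T20:00:00Z"
links="<a href=\"/${TIME_STAMP}/jest-combined/index.html\">Latest Jest</a>"
cat <<EOF >/tmp/nav_demo.html
<nav>${links}</nav>
EOF

Because the EOF delimiter is unquoted, ${links} is expanded when the heredoc is written.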
@@ -2,19 +2,22 @@
 
 set -euo pipefail
 
+xs=("$@")
+
 uploadPrefix="gs://elastic-bekitzur-kibana-coverage-live/"
 uploadPrefixWithTimeStamp="${uploadPrefix}${TIME_STAMP}/"
 
 cat src/dev/code_coverage/www/index.html
 
+uploadBase() {
 for x in 'src/dev/code_coverage/www/index.html' 'src/dev/code_coverage/www/404.html'; do
-  gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefix}
+  gsutil -m -q cp -r -a public-read -z js,css,html "${x}" "${uploadPrefix}"
 done
+}
 
-#gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefixWithTimeStamp}
-#
-#for x in 'target/kibana-coverage/functional-combined' 'target/kibana-coverage/jest-combined'; do
-#  gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefixWithTimeStamp}
-#done
+uploadRest() {
+  for x in "${xs[@]}"; do
+    gsutil -m -q cp -r -a public-read -z js,css,html "target/kibana-coverage/${x}-combined" "${uploadPrefixWithTimeStamp}"
+  done
+}
 
-gsutil -m -q cp -r -a public-read -z js,css,html 'target/kibana-coverage/jest-combined' ${uploadPrefixWithTimeStamp}
+uploadBase
+uploadRest
@@ -2,15 +2,27 @@
 
 set -euo pipefail
 
+header() {
+  local fileName=$1
+
+  echo "" >"$fileName"
+
+  echo "### File Name:" >>"$fileName"
+  printf " %s\n\n" "$fileName" >>"$fileName"
+}
+
+# $1 file name, ex: "target/dir-listing-jest.txt"
+# $2 directory to be listed, ex: target/kibana-coverage/jest
 dirListing() {
   local fileName=$1
   local dir=$2
 
-  ls -l "$dir" >"$fileName"
+  header "$fileName"
+
+  ls -l "$dir" >>"$fileName"
 
   printf "\n### %s \n\tlisted to: %s\n" "$dir" "$fileName"
 
   buildkite-agent artifact upload "$fileName"
 
   printf "\n### %s Uploaded\n" "$fileName"
@@ -29,15 +41,6 @@ replacePaths() {
   done
 }
 
-header() {
-  local fileName=$1
-
-  echo "" >"$fileName"
-
-  echo "### File Name:" >>"$fileName"
-  printf "\t%s\n" "$fileName" >>"$fileName"
-}
-
 fileHeads() {
   local fileName=$1
   local dir=$2
 
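For reference, a usage sketch of the relocated helpers (paths illustrative): after sourcing the utility script that defines them,

dirListing "target/dir-listing-jest.txt" target/kibana-coverage/jest
# writes a titled listing of target/kibana-coverage/jest into the file,
# then uploads it as a Buildkite build artifact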