Compare commits

1 commit

Author      SHA1        Message             Date
James Deng  7829523a9f  Update for v2.0.4   2024-12-11 19:19:36 +08:00
6332 changed files with 1725408 additions and 496992 deletions

@@ -40,11 +40,11 @@ meson.build @dbaker @eric
##########
# NIR
/src/compiler/nir/ @jekstrand
/src/compiler/nir/ @gfxstrand
# Vulkan
/src/vulkan/ @eric @jekstrand
/include/vulkan/ @eric @jekstrand
/src/vulkan/ @eric @gfxstrand
/include/vulkan/ @eric @gfxstrand
#############
@@ -79,12 +79,9 @@ meson.build @dbaker @eric
/src/glx/*glvnd* @kbrenneman
# Haiku
/include/HaikuGL/ @kallisti5
/src/egl/drivers/haiku/ @kallisti5
/src/gallium/frontends/hgl/ @kallisti5
/src/gallium/targets/haiku-softpipe/ @kallisti5
/src/gallium/winsys/sw/hgl/ @kallisti5
/src/hgl/ @kallisti5
# Loader - DRI/classic
/src/loader/ @xexaxo
@@ -123,16 +120,16 @@ meson.build @dbaker @eric
/src/gallium/drivers/freedreno/ @robclark
# Imagination
/include/drm-uapi/pvr_drm.h @CreativeCylon @frankbinns
/src/imagination/ @CreativeCylon @frankbinns
/include/drm-uapi/pvr_drm.h @CreativeCylon @frankbinns @MTCoster
/src/imagination/ @CreativeCylon @frankbinns @MTCoster
/src/imagination/rogue/ @simon-perretta-img
# Intel
/include/drm-uapi/i915_drm.h @kwg @llandwerlin @jekstrand @idr
/include/pci_ids/i*_pci_ids.h @kwg @llandwerlin @jekstrand @idr
/src/intel/ @kwg @llandwerlin @jekstrand @idr
/src/gallium/winsys/iris/ @kwg @llandwerlin @jekstrand @idr
/src/gallium/drivers/iris/ @kwg @llandwerlin @jekstrand @idr
/include/drm-uapi/i915_drm.h @kwg @llandwerlin @gfxstrand @idr
/include/pci_ids/i*_pci_ids.h @kwg @llandwerlin @gfxstrand @idr
/src/intel/ @kwg @llandwerlin @gfxstrand @idr
/src/gallium/winsys/iris/ @kwg @llandwerlin @gfxstrand @idr
/src/gallium/drivers/iris/ @kwg @llandwerlin @gfxstrand @idr
/src/gallium/drivers/i915/ @anholt
# Microsoft
@@ -140,9 +137,16 @@ meson.build @dbaker @eric
/src/gallium/drivers/d3d12/ @jenatali
# Panfrost
/src/panfrost/ @alyssa
/src/panfrost/vulkan/ @bbrezillon
/src/gallium/drivers/panfrost/ @alyssa
/src/panfrost/ @bbrezillon
/src/panfrost/midgard @italove
/src/gallium/drivers/panfrost/ @bbrezillon
# R300
/src/gallium/drivers/r300/ @ondracka @gawin
# VirGL - Video
/src/gallium/drivers/virgl/virgl_video.* @flynnjiang
/src/virtio/virtio-gpu/virgl_video_hw.h @flynnjiang
# VMware
/src/gallium/drivers/svga/ @brianp @charmainel

@@ -1 +1 @@
22.3.5
24.0.1

@@ -34,15 +34,15 @@ MESA_VK_LIB_SUFFIX_intel_hasvk := intel_hasvk
MESA_VK_LIB_SUFFIX_freedreno := freedreno
MESA_VK_LIB_SUFFIX_broadcom := broadcom
MESA_VK_LIB_SUFFIX_panfrost := panfrost
MESA_VK_LIB_SUFFIX_virtio-experimental := virtio
MESA_VK_LIB_SUFFIX_virtio := virtio
MESA_VK_LIB_SUFFIX_swrast := lvp
include $(CLEAR_VARS)
LOCAL_SHARED_LIBRARIES := libc libdl libdrm libm liblog libcutils libz libc++ libnativewindow libsync libhardware
LOCAL_STATIC_LIBRARIES := libexpat libarect libelf
LOCAL_HEADER_LIBRARIES := libnativebase_headers hwvulkan_headers libbacktrace_headers
MESON_GEN_PKGCONFIGS := backtrace cutils expat hardware libdrm:$(LIBDRM_VERSION) nativewindow sync zlib:1.2.11 libelf
LOCAL_HEADER_LIBRARIES := libnativebase_headers hwvulkan_headers
MESON_GEN_PKGCONFIGS := cutils expat hardware libdrm:$(LIBDRM_VERSION) nativewindow sync zlib:1.2.11 libelf
LOCAL_CFLAGS += $(BOARD_MESA3D_CFLAGS)
ifneq ($(filter swrast,$(BOARD_MESA3D_GALLIUM_DRIVERS) $(BOARD_MESA3D_VULKAN_DRIVERS)),)
@@ -61,9 +61,15 @@ LOCAL_SHARED_LIBRARIES += libdrm_intel
MESON_GEN_PKGCONFIGS += libdrm_intel:$(LIBDRM_VERSION)
endif
ifneq ($(filter radeonsi amd,$(BOARD_MESA3D_GALLIUM_DRIVERS) $(BOARD_MESA3D_VULKAN_DRIVERS)),)
MESON_GEN_LLVM_STUB := true
ifneq ($(filter radeonsi,$(BOARD_MESA3D_GALLIUM_DRIVERS)),)
ifneq ($(MESON_GEN_LLVM_STUB),)
LOCAL_CFLAGS += -DFORCE_BUILD_AMDGPU # instructs LLVM to declare LLVMInitializeAMDGPU* functions
# The flag is required for the Android-x86 LLVM port that follows the AOSP LLVM porting rules
# https://osdn.net/projects/android-x86/scm/git/external-llvm-project
endif
endif
ifneq ($(filter radeonsi amd,$(BOARD_MESA3D_GALLIUM_DRIVERS) $(BOARD_MESA3D_VULKAN_DRIVERS)),)
LOCAL_SHARED_LIBRARIES += libdrm_amdgpu
MESON_GEN_PKGCONFIGS += libdrm_amdgpu:$(LIBDRM_VERSION)
endif
@@ -158,6 +164,7 @@ include $(BUILD_PREBUILT)
endif
endef
ifneq ($(strip $(BOARD_MESA3D_GALLIUM_DRIVERS)),)
# Module 'libgallium_dri', produces '/vendor/lib{64}/dri/libgallium_dri.so'
# This module also triggers the DRI symlinks creation process
$(eval $(call mesa3d-lib,libgallium_dri,.so.0,dri,MESA3D_GALLIUM_DRI_BIN))
@@ -170,6 +177,7 @@ $(eval $(call mesa3d-lib,libEGL_mesa,.so.1,egl,MESA3D_LIBEGL_BIN))
$(eval $(call mesa3d-lib,libGLESv1_CM_mesa,.so.1,egl,MESA3D_LIBGLESV1_BIN))
# Module 'libGLESv2_mesa', produces '/vendor/lib{64}/egl/libGLESv2_mesa.so'
$(eval $(call mesa3d-lib,libGLESv2_mesa,.so.2,egl,MESA3D_LIBGLESV2_BIN))
endif
# Modules 'vulkan.{driver_name}', produces '/vendor/lib{64}/hw/vulkan.{driver_name}.so' HAL
$(foreach driver,$(BOARD_MESA3D_VULKAN_DRIVERS), \

@@ -88,9 +88,12 @@ MESON_GEN_NINJA := \
-Dgallium-drivers=$(subst $(space),$(comma),$(BOARD_MESA3D_GALLIUM_DRIVERS)) \
-Dvulkan-drivers=$(subst $(space),$(comma),$(subst radeon,amd,$(BOARD_MESA3D_VULKAN_DRIVERS))) \
-Dgbm=enabled \
-Degl=enabled \
-Degl=$(if $(BOARD_MESA3D_GALLIUM_DRIVERS),enabled,disabled) \
-Dllvm=$(if $(MESON_GEN_LLVM_STUB),enabled,disabled) \
-Dcpp_rtti=false \
-Dlmsensors=disabled \
-Dandroid-libbacktrace=disabled \
$(BOARD_MESA3D_MESON_ARGS) \
MESON_BUILD := PATH=/usr/bin:/bin:/sbin:$$PATH ninja -C $(MESON_OUT_DIR)/build
@@ -202,7 +205,9 @@ define m-c-flags
endef
define filter-c-flags
$(filter-out -std=gnu++17 -std=gnu++14 -std=gnu99 -fno-rtti, \
$(filter-out -std=gnu++17 -std=gnu++14 -std=gnu99 -fno-rtti \
-enable-trivial-auto-var-init-zero-knowing-it-will-be-removed-from-clang \
-ftrivial-auto-var-init=zero,
$(patsubst -W%,, $1))
endef
@@ -288,7 +293,7 @@ $(MESON_OUT_DIR)/install/.install.timestamp: $(MESON_OUT_DIR)/.build.timestamp
rm -rf $(dir $@)
mkdir -p $(dir $@)
DESTDIR=$(call relative-to-absolute,$(dir $@)) $(MESON_BUILD) install
$(MESON_COPY_LIBGALLIUM)
$(if $(BOARD_MESA3D_GALLIUM_DRIVERS),$(MESON_COPY_LIBGALLIUM))
touch $@
$($(M_TARGET_PREFIX)MESA3D_LIBGBM_BIN) $(MESA3D_GLES_BINS): $(MESON_OUT_DIR)/install/.install.timestamp

2
bin/ci/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
schema.graphql
gitlab_gql.py.cache*

413
bin/ci/ci_run_n_monitor.py Executable file
@@ -0,0 +1,413 @@
#!/usr/bin/env python3
# Copyright © 2020 - 2022 Collabora Ltd.
# Authors:
# Tomeu Vizoso <tomeu.vizoso@collabora.com>
# David Heidelberg <david.heidelberg@collabora.com>
#
# For the dependencies, see the requirements.txt
# SPDX-License-Identifier: MIT
"""
Helper script to restrict running only required CI jobs
and show the job(s) logs.
"""
import argparse
import re
import sys
import time
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from itertools import chain
from subprocess import check_output
from typing import TYPE_CHECKING, Iterable, Literal, Optional
import gitlab
from colorama import Fore, Style
from gitlab_common import (
get_gitlab_project,
read_token,
wait_for_pipeline,
pretty_duration,
)
from gitlab_gql import GitlabGQL, create_job_needs_dag, filter_dag, print_dag
if TYPE_CHECKING:
from gitlab_gql import Dag
GITLAB_URL = "https://gitlab.freedesktop.org"
REFRESH_WAIT_LOG = 10
REFRESH_WAIT_JOBS = 6
URL_START = "\033]8;;"
URL_END = "\033]8;;\a"
STATUS_COLORS = {
"created": "",
"running": Fore.BLUE,
"success": Fore.GREEN,
"failed": Fore.RED,
"canceled": Fore.MAGENTA,
"manual": "",
"pending": "",
"skipped": "",
}
COMPLETED_STATUSES = ["success", "failed"]
def print_job_status(job, new_status=False) -> None:
"""It prints a nice, colored job status with a link to the job."""
if job.status == "canceled":
return
if job.duration:
duration = job.duration
elif job.started_at:
duration = time.perf_counter() - time.mktime(job.started_at.timetuple())
print(
STATUS_COLORS[job.status]
+ "🞋 job "
+ URL_START
+ f"{job.web_url}\a{job.name}"
+ URL_END
+ (f" has new status: {job.status}" if new_status else f" :: {job.status}")
+ (f" ({pretty_duration(duration)})" if job.started_at else "")
+ Style.RESET_ALL
)
def pretty_wait(sec: int) -> None:
"""shows progressbar in dots"""
for val in range(sec, 0, -1):
print(f"{val} seconds", end="\r")
time.sleep(1)
def monitor_pipeline(
project,
pipeline,
target_jobs_regex: re.Pattern,
dependencies,
force_manual: bool,
stress: int,
) -> tuple[Optional[int], Optional[int]]:
"""Monitors pipeline and delegate canceling jobs"""
statuses: dict[str, str] = defaultdict(str)
target_statuses: dict[str, str] = defaultdict(str)
stress_status_counter = defaultdict(lambda: defaultdict(int))
target_id = None
while True:
deps_failed = []
to_cancel = []
for job in pipeline.jobs.list(all=True, sort="desc"):
# target jobs
if target_jobs_regex.fullmatch(job.name):
target_id = job.id
if stress and job.status in ["success", "failed"]:
if (
stress < 0
or sum(stress_status_counter[job.name].values()) < stress
):
enable_job(project, job, "retry", force_manual)
stress_status_counter[job.name][job.status] += 1
else:
enable_job(project, job, "target", force_manual)
print_job_status(job, job.status not in target_statuses[job.name])
target_statuses[job.name] = job.status
continue
# all jobs
if job.status != statuses[job.name]:
print_job_status(job, True)
statuses[job.name] = job.status
# run dependencies and cancel the rest
if job.name in dependencies:
enable_job(project, job, "dep", True)
if job.status == "failed":
deps_failed.append(job.name)
else:
to_cancel.append(job)
cancel_jobs(project, to_cancel)
if stress:
enough = True
for job_name, status in stress_status_counter.items():
print(
f"{job_name}\tsucc: {status['success']}; "
f"fail: {status['failed']}; "
f"total: {sum(status.values())} of {stress}",
flush=False,
)
if stress < 0 or sum(status.values()) < stress:
enough = False
if not enough:
pretty_wait(REFRESH_WAIT_JOBS)
continue
print("---------------------------------", flush=False)
if len(target_statuses) == 1 and {"running"}.intersection(
target_statuses.values()
):
return target_id, None
if (
{"failed"}.intersection(target_statuses.values())
and not set(["running", "pending"]).intersection(target_statuses.values())
):
return None, 1
if (
{"skipped"}.intersection(target_statuses.values())
and not {"running", "pending"}.intersection(target_statuses.values())
):
print(
Fore.RED,
"Target in skipped state, aborting. Failed dependencies:",
deps_failed,
Fore.RESET,
)
return None, 1
if {"success", "manual"}.issuperset(target_statuses.values()):
return None, 0
pretty_wait(REFRESH_WAIT_JOBS)
def enable_job(
project, job, action_type: Literal["target", "dep", "retry"], force_manual: bool
) -> None:
"""enable job"""
if (
(job.status in ["success", "failed"] and action_type != "retry")
or (job.status == "manual" and not force_manual)
or job.status in ["skipped", "running", "created", "pending"]
):
return
pjob = project.jobs.get(job.id, lazy=True)
if job.status in ["success", "failed", "canceled"]:
pjob.retry()
else:
pjob.play()
if action_type == "target":
jtype = "🞋 "
elif action_type == "retry":
jtype = ""
else:
jtype = "(dependency)"
print(Fore.MAGENTA + f"{jtype} job {job.name} manually enabled" + Style.RESET_ALL)
def cancel_job(project, job) -> None:
"""Cancel GitLab job"""
if job.status in [
"canceled",
"success",
"failed",
"skipped",
]:
return
pjob = project.jobs.get(job.id, lazy=True)
pjob.cancel()
print(f"{job.name}", end=" ")
def cancel_jobs(project, to_cancel) -> None:
"""Cancel unwanted GitLab jobs"""
if not to_cancel:
return
with ThreadPoolExecutor(max_workers=6) as exe:
part = partial(cancel_job, project)
exe.map(part, to_cancel)
print()
def print_log(project, job_id) -> None:
"""Print job log into output"""
printed_lines = 0
while True:
job = project.jobs.get(job_id)
# GitLab's REST API doesn't offer pagination for logs, so we have to refetch it all
lines = job.trace().decode("raw_unicode_escape").splitlines()
for line in lines[printed_lines:]:
print(line)
printed_lines = len(lines)
if job.status in COMPLETED_STATUSES:
print(Fore.GREEN + f"Job finished: {job.web_url}" + Style.RESET_ALL)
return
pretty_wait(REFRESH_WAIT_LOG)
def parse_args() -> None:
"""Parse args"""
parser = argparse.ArgumentParser(
description="Tool to trigger a subset of container jobs "
+ "and monitor the progress of a test job",
epilog="Example: mesa-monitor.py --rev $(git rev-parse HEAD) "
+ '--target ".*traces" ',
)
parser.add_argument(
"--target",
metavar="target-job",
help="Target job regex. For multiple targets, separate with pipe | character",
required=True,
)
parser.add_argument(
"--token",
metavar="token",
help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
)
parser.add_argument(
"--force-manual", action="store_true", help="Force jobs marked as manual"
)
parser.add_argument(
"--stress",
default=0,
type=int,
help="Stresstest job(s). Number or repetitions or -1 for infinite.",
)
parser.add_argument(
"--project",
default="mesa",
help="GitLab project in the format <user>/<project> or just <project>",
)
mutex_group1 = parser.add_mutually_exclusive_group()
mutex_group1.add_argument(
"--rev", default="HEAD", metavar="revision", help="repository git revision (default: HEAD)"
)
mutex_group1.add_argument(
"--pipeline-url",
help="URL of the pipeline to use, instead of auto-detecting it.",
)
mutex_group1.add_argument(
"--mr",
type=int,
help="ID of a merge request; the latest pipeline in that MR will be used.",
)
args = parser.parse_args()
# argparse doesn't support groups inside add_mutually_exclusive_group(),
# which means we can't just put `--project` and `--rev` in a group together,
# we have to do this by hand instead.
if args.pipeline_url and args.project != parser.get_default("project"):
# weird phrasing but it's the error add_mutually_exclusive_group() gives
parser.error("argument --project: not allowed with argument --pipeline-url")
return args
def print_detected_jobs(
target_dep_dag: "Dag", dependency_jobs: Iterable[str], target_jobs: Iterable[str]
) -> None:
def print_job_set(color: str, kind: str, job_set: Iterable[str]):
print(
color + f"Running {len(job_set)} {kind} jobs: ",
"\n",
", ".join(sorted(job_set)),
Fore.RESET,
"\n",
)
print(Fore.YELLOW + "Detected target job and its dependencies:", "\n")
print_dag(target_dep_dag)
print_job_set(Fore.MAGENTA, "dependency", dependency_jobs)
print_job_set(Fore.BLUE, "target", target_jobs)
def find_dependencies(target_jobs_regex: re.Pattern, project_path: str, iid: int) -> set[str]:
gql_instance = GitlabGQL()
dag = create_job_needs_dag(
gql_instance, {"projectPath": project_path.path_with_namespace, "iid": iid}
)
target_dep_dag = filter_dag(dag, target_jobs_regex)
if not target_dep_dag:
print(Fore.RED + "The job(s) were not found in the pipeline." + Fore.RESET)
sys.exit(1)
dependency_jobs = set(chain.from_iterable(d["needs"] for d in target_dep_dag.values()))
target_jobs = set(target_dep_dag.keys())
print_detected_jobs(target_dep_dag, dependency_jobs, target_jobs)
return target_jobs.union(dependency_jobs)
if __name__ == "__main__":
try:
t_start = time.perf_counter()
args = parse_args()
token = read_token(args.token)
gl = gitlab.Gitlab(url=GITLAB_URL,
private_token=token,
retry_transient_errors=True)
REV: str = args.rev
if args.pipeline_url:
assert args.pipeline_url.startswith(GITLAB_URL)
url_path = args.pipeline_url[len(GITLAB_URL):]
url_path_components = url_path.split("/")
project_name = "/".join(url_path_components[1:3])
assert url_path_components[3] == "-"
assert url_path_components[4] == "pipelines"
pipeline_id = int(url_path_components[5])
cur_project = gl.projects.get(project_name)
pipe = cur_project.pipelines.get(pipeline_id)
REV = pipe.sha
else:
mesa_project = gl.projects.get("mesa/mesa")
projects = [mesa_project]
if args.mr:
REV = mesa_project.mergerequests.get(args.mr).sha
else:
REV = check_output(['git', 'rev-parse', REV]).decode('ascii').strip()
projects.append(get_gitlab_project(gl, args.project))
(pipe, cur_project) = wait_for_pipeline(projects, REV)
print(f"Revision: {REV}")
print(f"Pipeline: {pipe.web_url}")
target_jobs_regex = re.compile(args.target.strip())
deps = set()
if args.target:
print("🞋 job: " + Fore.BLUE + args.target + Style.RESET_ALL)
deps = find_dependencies(
target_jobs_regex=target_jobs_regex, iid=pipe.iid, project_path=cur_project
)
target_job_id, ret = monitor_pipeline(
cur_project, pipe, target_jobs_regex, deps, args.force_manual, args.stress
)
if target_job_id:
print_log(cur_project, target_job_id)
t_end = time.perf_counter()
spend_minutes = (t_end - t_start) / 60
print(f"⏲ Duration of script execution: {spend_minutes:0.1f} minutes")
sys.exit(ret)
except KeyboardInterrupt:
sys.exit(1)
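
As a rough illustration of how the pieces above fit together, the sketch below drives monitor_pipeline() and print_log() from another Python script instead of the CLI entry point. It assumes bin/ci is on PYTHONPATH and the requirements installed; the pipeline ID and the job-name regex are placeholders, not values from this commit.

# Hypothetical driver for monitor_pipeline()/print_log(); pipeline ID and regex are placeholders.
import re
import gitlab
from gitlab_common import read_token
from ci_run_n_monitor import monitor_pipeline, print_log

gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org",
                   private_token=read_token(None), retry_transient_errors=True)
project = gl.projects.get("mesa/mesa")
pipe = project.pipelines.get(123456)                  # placeholder pipeline ID
target = re.compile(r"debian-build-testing")          # placeholder job-name regex
job_id, ret = monitor_pipeline(project, pipe, target,
                               dependencies=set(), force_manual=False, stress=0)
if job_id:
    print_log(project, job_id)                        # stream the target job's log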

10
bin/ci/ci_run_n_monitor.sh Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env bash
set -eu
this_dir=$(dirname -- "$(readlink -f -- "${BASH_SOURCE[0]}")")
readonly this_dir
exec \
"$this_dir/../python-venv.sh" \
"$this_dir/requirements.txt" \
"$this_dir/ci_run_n_monitor.py" "$@"

334
bin/ci/custom_logger.py Normal file
@@ -0,0 +1,334 @@
import argparse
import logging
from datetime import datetime
from pathlib import Path
from structured_logger import StructuredLogger
class CustomLogger:
def __init__(self, log_file):
self.log_file = log_file
self.logger = StructuredLogger(file_name=self.log_file)
def get_last_dut_job(self):
"""
Gets the details of the most recent DUT job.
Returns:
dict: Details of the most recent DUT job.
Raises:
ValueError: If no DUT jobs are found in the logger's data.
"""
try:
job = self.logger.data["dut_jobs"][-1]
except KeyError:
raise ValueError(
"No DUT jobs found. Please create a job via create_dut_job call."
)
return job
def update(self, **kwargs):
"""
Updates the log file with provided key-value pairs.
Args:
**kwargs: Key-value pairs to be updated.
"""
with self.logger.edit_context():
for key, value in kwargs.items():
self.logger.data[key] = value
def create_dut_job(self, **kwargs):
"""
Creates a new DUT job with provided key-value pairs.
Args:
**kwargs: Key-value pairs for the new DUT job.
"""
with self.logger.edit_context():
if "dut_jobs" not in self.logger.data:
self.logger.data["dut_jobs"] = []
new_job = {
"status": "",
"submitter_start_time": datetime.now().isoformat(),
"dut_submit_time": "",
"dut_start_time": "",
"dut_end_time": "",
"dut_name": "",
"dut_state": "pending",
"dut_job_phases": [],
**kwargs,
}
self.logger.data["dut_jobs"].append(new_job)
def update_dut_job(self, key, value):
"""
Updates the last DUT job with a key-value pair.
Args:
key : The key to be updated.
value: The value to be assigned.
"""
with self.logger.edit_context():
job = self.get_last_dut_job()
job[key] = value
def update_status_fail(self, reason=""):
"""
Sets the status of the last DUT job to 'fail' and logs the failure reason.
Args:
reason (str, optional): The reason for the failure. Defaults to "".
"""
with self.logger.edit_context():
job = self.get_last_dut_job()
job["status"] = "fail"
job["dut_job_fail_reason"] = reason
def create_job_phase(self, phase_name):
"""
Creates a new job phase for the last DUT job.
Args:
phase_name : The name of the new job phase.
"""
with self.logger.edit_context():
job = self.get_last_dut_job()
if job["dut_job_phases"] and job["dut_job_phases"][-1]["end_time"] == "":
# If the last phase exists and its end time is empty, set the end time
job["dut_job_phases"][-1]["end_time"] = datetime.now().isoformat()
# Create a new phase
phase_data = {
"name": phase_name,
"start_time": datetime.now().isoformat(),
"end_time": "",
}
job["dut_job_phases"].append(phase_data)
def check_dut_timings(self, job):
"""
Check the timing sequence of a job to ensure logical consistency.
The function verifies that the job's submission time is not earlier than its start time and that
the job's end time is not earlier than its start time. If either of these conditions is found to be true,
an error is logged for each instance of inconsistency.
Args:
job (dict): A dictionary containing timing information of a job. Expected keys are 'dut_start_time',
'dut_submit_time', and 'dut_end_time'.
Returns:
None: This function does not return a value; it logs errors if timing inconsistencies are detected.
The function checks the following:
- If 'dut_start_time' and 'dut_submit_time' are both present and correctly sequenced.
- If 'dut_start_time' and 'dut_end_time' are both present and correctly sequenced.
"""
# Check if the start time and submit time exist
if job.get("dut_start_time") and job.get("dut_submit_time"):
# If they exist, check if the submission time is before the start time
if job["dut_start_time"] < job["dut_submit_time"]:
logging.error("Job submission is happening before job start.")
# Check if the start time and end time exist
if job.get("dut_start_time") and job.get("dut_end_time"):
# If they exist, check if the end time is after the start time
if job["dut_end_time"] < job["dut_start_time"]:
logging.error("Job ended before it started.")
# Method to update DUT start, submit and end time
def update_dut_time(self, value, custom_time):
"""
Updates DUT start, submit, and end times.
Args:
value : Specifies which DUT time to update. Options: 'start', 'submit', 'end'.
custom_time : Custom time to set. If None, use current time.
Raises:
ValueError: If an invalid argument is provided for value.
"""
with self.logger.edit_context():
job = self.get_last_dut_job()
timestamp = custom_time if custom_time else datetime.now().isoformat()
if value == "start":
job["dut_start_time"] = timestamp
job["dut_state"] = "running"
elif value == "submit":
job["dut_submit_time"] = timestamp
job["dut_state"] = "submitted"
elif value == "end":
job["dut_end_time"] = timestamp
job["dut_state"] = "finished"
else:
raise ValueError(
"Error: Invalid argument provided for --update-dut-time. Use 'start', 'submit', 'end'."
)
# check the sanity of the partial structured log
self.check_dut_timings(job)
def close_dut_job(self):
"""
Closes the most recent DUT (Device Under Test) job in the logger's data.
The method performs the following operations:
1. Validates if there are any DUT jobs in the logger's data.
2. If the last phase of the most recent DUT job has an empty end time, it sets the end time to the current time.
Raises:
ValueError: If no DUT jobs are found in the logger's data.
"""
with self.logger.edit_context():
job = self.get_last_dut_job()
# Check if the last phase exists and its end time is empty, then set the end time
if job["dut_job_phases"] and job["dut_job_phases"][-1]["end_time"] == "":
job["dut_job_phases"][-1]["end_time"] = datetime.now().isoformat()
def close(self):
"""
Finalizes the logger's data once all DUT (Device Under Test) jobs are done.
The method performs the following operations:
1. Determines the combined status of all DUT jobs.
2. Sets the submitter's end time to the current time.
3. Updates the DUT attempt counter to reflect the total number of DUT jobs.
"""
with self.logger.edit_context():
job_status = []
for job in self.logger.data["dut_jobs"]:
if "status" in job:
job_status.append(job["status"])
if not job_status:
job_combined_status = "null"
else:
# Get job_combined_status
if "pass" in job_status:
job_combined_status = "pass"
else:
job_combined_status = "fail"
self.logger.data["job_combined_status"] = job_combined_status
self.logger.data["dut_attempt_counter"] = len(self.logger.data["dut_jobs"])
job["submitter_end_time"] = datetime.now().isoformat()
def process_args(args):
# Function to process key-value pairs and call corresponding logger methods
def process_key_value_pairs(args_list, action_func):
if not args_list:
raise ValueError(
f"No key-value pairs provided for {action_func.__name__.replace('_', '-')}"
)
if len(args_list) % 2 != 0:
raise ValueError(
f"Incomplete key-value pairs for {action_func.__name__.replace('_', '-')}"
)
kwargs = dict(zip(args_list[::2], args_list[1::2]))
action_func(**kwargs)
# Create a CustomLogger object with the specified log file path
custom_logger = CustomLogger(Path(args.log_file))
if args.update:
process_key_value_pairs(args.update, custom_logger.update)
if args.create_dut_job:
process_key_value_pairs(args.create_dut_job, custom_logger.create_dut_job)
if args.update_dut_job:
key, value = args.update_dut_job
custom_logger.update_dut_job(key, value)
if args.create_job_phase:
custom_logger.create_job_phase(args.create_job_phase)
if args.update_status_fail:
custom_logger.update_status_fail(args.update_status_fail)
if args.update_dut_time:
if len(args.update_dut_time) == 2:
action, custom_time = args.update_dut_time
elif len(args.update_dut_time) == 1:
action, custom_time = args.update_dut_time[0], None
else:
raise ValueError("Invalid number of values for --update-dut-time")
if action in ["start", "end", "submit"]:
custom_logger.update_dut_time(action, custom_time)
else:
raise ValueError(
"Error: Invalid argument provided for --update-dut-time. Use 'start', 'submit', 'end'."
)
if args.close_dut_job:
custom_logger.close_dut_job()
if args.close:
custom_logger.close()
def main():
parser = argparse.ArgumentParser(description="Custom Logger Command Line Tool")
parser.add_argument("log_file", help="Path to the log file")
parser.add_argument(
"--update",
nargs=argparse.ZERO_OR_MORE,
metavar=("key", "value"),
help="Update a key-value pair e.g., --update key1 value1 key2 value2)",
)
parser.add_argument(
"--create-dut-job",
nargs=argparse.ZERO_OR_MORE,
metavar=("key", "value"),
help="Create a new DUT job with key-value pairs (e.g., --create-dut-job key1 value1 key2 value2)",
)
parser.add_argument(
"--update-dut-job",
nargs=argparse.ZERO_OR_MORE,
metavar=("key", "value"),
help="Update a key-value pair in DUT job",
)
parser.add_argument(
"--create-job-phase",
help="Create a new job phase (e.g., --create-job-phase name)",
)
parser.add_argument(
"--update-status-fail",
help="Update fail as the status and log the failure reason (e.g., --update-status-fail reason)",
)
parser.add_argument(
"--update-dut-time",
nargs=argparse.ZERO_OR_MORE,
metavar=("action", "custom_time"),
help="Update DUT start and end time. Provide action ('start', 'submit', 'end') and custom_time (e.g., '2023-01-01T12:00:00')",
)
parser.add_argument(
"--close-dut-job",
action="store_true",
help="Close the dut job by updating end time of last dut job)",
)
parser.add_argument(
"--close",
action="store_true",
help="Updates combined status, submitter's end time and DUT attempt counter",
)
args = parser.parse_args()
process_args(args)
if __name__ == "__main__":
main()
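
For reference, a minimal sketch of using CustomLogger programmatically rather than through the CLI flags above; the log path and field values are just examples.

# Hypothetical programmatic use of CustomLogger; the path and values are examples.
from custom_logger import CustomLogger

logger = CustomLogger("results/job_detail.json")
logger.create_dut_job(dut_name="asus", status="")     # opens a new DUT job entry
logger.update_dut_time("submit", None)                # dut_submit_time, state "submitted"
logger.create_job_phase("Bootloader")                 # starts a named phase
logger.update_dut_time("start", None)                 # dut_start_time, state "running"
logger.update_dut_time("end", None)                   # dut_end_time, state "finished"
logger.update_dut_job("status", "pass")
logger.close_dut_job()                                # closes the last open phase
logger.close()                                        # job_combined_status, dut_attempt_counter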

11
bin/ci/download_gl_schema.sh Executable file
@@ -0,0 +1,11 @@
#!/bin/sh
# Helper script to download the GraphQL schema from GitLab so that IDEs can
# assist the developer when editing gql files
SOURCE_DIR=$(dirname "$(realpath "$0")")
(
cd $SOURCE_DIR || exit 1
gql-cli https://gitlab.freedesktop.org/api/graphql --print-schema > schema.graphql
)

63
bin/ci/gitlab_common.py Normal file
@@ -0,0 +1,63 @@
#!/usr/bin/env python3
# Copyright © 2020 - 2022 Collabora Ltd.
# Authors:
# Tomeu Vizoso <tomeu.vizoso@collabora.com>
# David Heidelberg <david.heidelberg@collabora.com>
#
# SPDX-License-Identifier: MIT
'''Shared functions between the scripts.'''
import os
import time
from typing import Optional
def pretty_duration(seconds):
"""Pretty print duration"""
hours, rem = divmod(seconds, 3600)
minutes, seconds = divmod(rem, 60)
if hours:
return f"{hours:0.0f}h{minutes:0.0f}m{seconds:0.0f}s"
if minutes:
return f"{minutes:0.0f}m{seconds:0.0f}s"
return f"{seconds:0.0f}s"
def get_gitlab_project(glab, name: str):
"""Finds a specified gitlab project for given user"""
if "/" in name:
project_path = name
else:
glab.auth()
username = glab.user.username
project_path = f"{username}/{name}"
return glab.projects.get(project_path)
def read_token(token_arg: Optional[str]) -> str:
"""pick token from args or file"""
if token_arg:
return token_arg
return (
open(os.path.expanduser("~/.config/gitlab-token"), encoding="utf-8")
.readline()
.rstrip()
)
def wait_for_pipeline(projects, sha: str, timeout=None):
"""await until pipeline appears in Gitlab"""
project_names = [project.path_with_namespace for project in projects]
print(f"⏲ for the pipeline to appear in {project_names}..", end="")
start_time = time.time()
while True:
for project in projects:
pipelines = project.pipelines.list(sha=sha)
if pipelines:
print("", flush=True)
return (pipelines[0], project)
print("", end=".", flush=True)
if timeout and time.time() - start_time > timeout:
print(" not found", flush=True)
return (None, None)
time.sleep(1)
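
A small usage sketch of these shared helpers (the SHA below is a placeholder); wait_for_pipeline() returns (None, None) if the timeout expires before a pipeline shows up.

# Hypothetical usage of the gitlab_common helpers; the SHA is a placeholder.
import gitlab
from gitlab_common import read_token, get_gitlab_project, wait_for_pipeline, pretty_duration

gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=read_token(None))
project = get_gitlab_project(gl, "mesa/mesa")
pipe, proj = wait_for_pipeline([project], sha="0123456789abcdef", timeout=300)
if pipe:
    print(f"found {pipe.web_url} after {pretty_duration(42)}")   # pretty_duration(42) -> "42s"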

548
bin/ci/gitlab_gql.py Executable file
@@ -0,0 +1,548 @@
#!/usr/bin/env python3
# For the dependencies, see the requirements.txt
import logging
import re
import traceback
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace
from collections import OrderedDict
from copy import deepcopy
from dataclasses import dataclass, field
from itertools import accumulate
from os import getenv
from pathlib import Path
from subprocess import check_output
from textwrap import dedent
from typing import Any, Iterable, Optional, Pattern, TypedDict, Union
import yaml
from filecache import DAY, filecache
from gql import Client, gql
from gql.transport.requests import RequestsHTTPTransport
from graphql import DocumentNode
class DagNode(TypedDict):
needs: set[str]
stage: str
# `name` is redundant but is kept here for backward compatibility
name: str
# see create_job_needs_dag function for more details
Dag = dict[str, DagNode]
StageSeq = OrderedDict[str, set[str]]
TOKEN_DIR = Path(getenv("XDG_CONFIG_HOME") or Path.home() / ".config")
def get_token_from_default_dir() -> str:
token_file = TOKEN_DIR / "gitlab-token"
try:
return str(token_file.resolve())
except FileNotFoundError as ex:
print(
f"Could not find {token_file}, please provide a token file as an argument"
)
raise ex
def get_project_root_dir():
root_path = Path(__file__).parent.parent.parent.resolve()
gitlab_file = root_path / ".gitlab-ci.yml"
assert gitlab_file.exists()
return root_path
@dataclass
class GitlabGQL:
_transport: Any = field(init=False)
client: Client = field(init=False)
url: str = "https://gitlab.freedesktop.org/api/graphql"
token: Optional[str] = None
def __post_init__(self) -> None:
self._setup_gitlab_gql_client()
def _setup_gitlab_gql_client(self) -> None:
# Select your transport with a defined url endpoint
headers = {}
if self.token:
headers["Authorization"] = f"Bearer {self.token}"
self._transport = RequestsHTTPTransport(url=self.url, headers=headers)
# Create a GraphQL client using the defined transport
self.client = Client(transport=self._transport, fetch_schema_from_transport=True)
def query(
self,
gql_file: Union[Path, str],
params: dict[str, Any] = {},
operation_name: Optional[str] = None,
paginated_key_loc: Iterable[str] = [],
disable_cache: bool = False,
) -> dict[str, Any]:
def run_uncached() -> dict[str, Any]:
if paginated_key_loc:
return self._sweep_pages(gql_file, params, operation_name, paginated_key_loc)
return self._query(gql_file, params, operation_name)
if disable_cache:
return run_uncached()
try:
# Create an auxiliary variable to deliver a cached result and enable catching exceptions
# Decorate the query to be cached
if paginated_key_loc:
result = self._sweep_pages_cached(
gql_file, params, operation_name, paginated_key_loc
)
else:
result = self._query_cached(gql_file, params, operation_name)
return result # type: ignore
except Exception as ex:
logging.error(f"Cached query failed with {ex}")
# print exception traceback
traceback_str = "".join(traceback.format_exception(ex))
logging.error(traceback_str)
self.invalidate_query_cache()
logging.error("Cache invalidated, retrying without cache")
finally:
return run_uncached()
def _query(
self,
gql_file: Union[Path, str],
params: dict[str, Any] = {},
operation_name: Optional[str] = None,
) -> dict[str, Any]:
# Provide a GraphQL query
source_path: Path = Path(__file__).parent
pipeline_query_file: Path = source_path / gql_file
query: DocumentNode
with open(pipeline_query_file, "r") as f:
pipeline_query = f.read()
query = gql(pipeline_query)
# Execute the query on the transport
return self.client.execute_sync(
query, variable_values=params, operation_name=operation_name
)
@filecache(DAY)
def _sweep_pages_cached(self, *args, **kwargs):
return self._sweep_pages(*args, **kwargs)
@filecache(DAY)
def _query_cached(self, *args, **kwargs):
return self._query(*args, **kwargs)
def _sweep_pages(
self, query, params, operation_name=None, paginated_key_loc: Iterable[str] = []
) -> dict[str, Any]:
"""
Retrieve paginated data from a GraphQL API and concatenate the results into a single
response.
Args:
query: represents a filepath with the GraphQL query to be executed.
params: a dictionary that contains the parameters to be passed to the query. These
parameters can be used to filter or modify the results of the query.
operation_name: The `operation_name` parameter is an optional parameter that specifies
the name of the GraphQL operation to be executed. It is used when making a GraphQL
query to specify which operation to execute if there are multiple operations defined
in the GraphQL schema. If not provided, the default operation will be executed.
paginated_key_loc (Iterable[str]): The `paginated_key_loc` parameter is an iterable of
strings that represents the location of the paginated field within the response. It
is used to extract the paginated field from the response and append it to the final
result. The node has to be a list of objects with a `pageInfo` field that contains
at least the `hasNextPage` and `endCursor` fields.
Returns:
a dictionary containing the response from the query with the paginated field
concatenated.
"""
def fetch_page(cursor: str | None = None) -> dict[str, Any]:
if cursor:
params["cursor"] = cursor
logging.info(
f"Found more than 100 elements, paginating. "
f"Current cursor at {cursor}"
)
return self._query(query, params, operation_name)
# Execute the initial query
response: dict[str, Any] = fetch_page()
# Initialize an empty list to store the final result
final_partial_field: list[dict[str, Any]] = []
# Loop until all pages have been retrieved
while True:
# Get the partial field to be appended to the final result
partial_field = response
for key in paginated_key_loc:
partial_field = partial_field[key]
# Append the partial field to the final result
final_partial_field += partial_field["nodes"]
# Check if there are more pages to retrieve
page_info = partial_field["pageInfo"]
if not page_info["hasNextPage"]:
break
# Execute the query with the updated cursor parameter
response = fetch_page(page_info["endCursor"])
# Replace the "nodes" field in the original response with the final result
partial_field["nodes"] = final_partial_field
return response
def invalidate_query_cache(self) -> None:
logging.warning("Invalidating query cache")
try:
self._sweep_pages._db.clear()
self._query._db.clear()
except AttributeError as ex:
logging.warning(f"Could not invalidate cache, maybe it was not used in {ex.args}?")
def insert_early_stage_jobs(stage_sequence: StageSeq, jobs_metadata: Dag) -> Dag:
pre_processed_dag: dict[str, set[str]] = {}
jobs_from_early_stages = list(accumulate(stage_sequence.values(), set.union))
for job_name, metadata in jobs_metadata.items():
final_needs: set[str] = deepcopy(metadata["needs"])
# Pre-process jobs that are not based on needs field
# e.g. sanity job in mesa MR pipelines
if not final_needs:
job_stage: str = jobs_metadata[job_name]["stage"]
stage_index: int = list(stage_sequence.keys()).index(job_stage)
if stage_index > 0:
final_needs |= jobs_from_early_stages[stage_index - 1]
pre_processed_dag[job_name] = final_needs
for job_name, needs in pre_processed_dag.items():
jobs_metadata[job_name]["needs"] = needs
return jobs_metadata
def traverse_dag_needs(jobs_metadata: Dag) -> None:
created_jobs = set(jobs_metadata.keys())
for job, metadata in jobs_metadata.items():
final_needs: set = deepcopy(metadata["needs"]) & created_jobs
# Post process jobs that are based on needs field
partial = True
while partial:
next_depth: set[str] = {n for dn in final_needs for n in jobs_metadata[dn]["needs"]}
partial: bool = not final_needs.issuperset(next_depth)
final_needs = final_needs.union(next_depth)
jobs_metadata[job]["needs"] = final_needs
def extract_stages_and_job_needs(
pipeline_jobs: dict[str, Any], pipeline_stages: dict[str, Any]
) -> tuple[StageSeq, Dag]:
jobs_metadata = Dag()
# Record the stage sequence to post process deps that are not based on needs
# field, for example: sanity job
stage_sequence: OrderedDict[str, set[str]] = OrderedDict()
for stage in pipeline_stages["nodes"]:
stage_sequence[stage["name"]] = set()
for job in pipeline_jobs["nodes"]:
stage_sequence[job["stage"]["name"]].add(job["name"])
dag_job: DagNode = {
"name": job["name"],
"stage": job["stage"]["name"],
"needs": set([j["node"]["name"] for j in job["needs"]["edges"]]),
}
jobs_metadata[job["name"]] = dag_job
return stage_sequence, jobs_metadata
def create_job_needs_dag(gl_gql: GitlabGQL, params, disable_cache: bool = True) -> Dag:
"""
This function creates a Directed Acyclic Graph (DAG) to represent a sequence of jobs, where each
job has a set of jobs that it depends on (its "needs") and belongs to a certain "stage".
The "name" of the job is used as the key in the dictionary.
For example, consider the following DAG:
1. build stage: job1 -> job2 -> job3
2. test stage: job2 -> job4
- The job needs for job3 are: job1, job2
- The job needs for job4 are: job2
- job2 needs to wait for all jobs from the build stage to finish.
The resulting DAG would look like this:
dag = {
"job1": {"needs": set(), "stage": "build", "name": "job1"},
"job2": {"needs": {"job1", "job2", job3"}, "stage": "test", "name": "job2"},
"job3": {"needs": {"job1", "job2"}, "stage": "build", "name": "job3"},
"job4": {"needs": {"job2"}, "stage": "test", "name": "job4"},
}
To access the job needs, one can do:
dag["job3"]["needs"]
This will return the set of jobs that job3 needs: {"job1", "job2"}
Args:
gl_gql (GitlabGQL): The `gl_gql` parameter is an instance of the `GitlabGQL` class, which is
used to make GraphQL queries to the GitLab API.
params (dict): The `params` parameter is a dictionary that contains the necessary parameters
for the GraphQL query. It is used to specify the details of the pipeline for which the
job needs DAG is being created.
The specific keys and values in the `params` dictionary will depend on
the requirements of the GraphQL query being executed
disable_cache (bool): The `disable_cache` parameter is a boolean that specifies whether the query cache should be bypassed for this call.
Returns:
The final DAG (Directed Acyclic Graph) representing the job dependencies sourced from needs
or stages rule.
"""
stages_jobs_gql = gl_gql.query(
"pipeline_details.gql",
params=params,
paginated_key_loc=["project", "pipeline", "jobs"],
disable_cache=disable_cache,
)
pipeline_data = stages_jobs_gql["project"]["pipeline"]
if not pipeline_data:
raise RuntimeError(f"Could not find any pipelines for {params}")
stage_sequence, jobs_metadata = extract_stages_and_job_needs(
pipeline_data["jobs"], pipeline_data["stages"]
)
# Fill the DAG with the job needs from stages that don't have any needs but still need to wait
# for previous stages
final_dag = insert_early_stage_jobs(stage_sequence, jobs_metadata)
# Now that each job has its direct needs filled correctly, update the "needs" field for each job
# in the DAG by performing a topological traversal
traverse_dag_needs(final_dag)
return final_dag
def filter_dag(dag: Dag, regex: Pattern) -> Dag:
jobs_with_regex: set[str] = {job for job in dag if regex.fullmatch(job)}
return Dag({job: data for job, data in dag.items() if job in sorted(jobs_with_regex)})
def print_dag(dag: Dag) -> None:
for job, data in dag.items():
print(f"{job}:")
print(f"\t{' '.join(data['needs'])}")
print()
def fetch_merged_yaml(gl_gql: GitlabGQL, params) -> dict[str, Any]:
params["content"] = dedent("""\
include:
- local: .gitlab-ci.yml
""")
raw_response = gl_gql.query("job_details.gql", params)
if merged_yaml := raw_response["ciConfig"]["mergedYaml"]:
return yaml.safe_load(merged_yaml)
gl_gql.invalidate_query_cache()
raise ValueError(
"""
Could not fetch any content for merged YAML,
please verify if the git SHA exists in remote.
Maybe you forgot to `git push`? """
)
def recursive_fill(job, relationship_field, target_data, acc_data: dict, merged_yaml):
if relatives := job.get(relationship_field):
if isinstance(relatives, str):
relatives = [relatives]
for relative in relatives:
parent_job = merged_yaml[relative]
acc_data = recursive_fill(parent_job, acc_data, merged_yaml) # type: ignore
acc_data |= job.get(target_data, {})
return acc_data
def get_variables(job, merged_yaml, project_path, sha) -> dict[str, str]:
p = get_project_root_dir() / ".gitlab-ci" / "image-tags.yml"
image_tags = yaml.safe_load(p.read_text())
variables = image_tags["variables"]
variables |= merged_yaml["variables"]
variables |= job["variables"]
variables["CI_PROJECT_PATH"] = project_path
variables["CI_PROJECT_NAME"] = project_path.split("/")[1]
variables["CI_REGISTRY_IMAGE"] = "registry.freedesktop.org/${CI_PROJECT_PATH}"
variables["CI_COMMIT_SHA"] = sha
while recurse_among_variables_space(variables):
pass
return variables
# Based on: https://stackoverflow.com/a/2158532/1079223
def flatten(xs):
for x in xs:
if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
yield from flatten(x)
else:
yield x
def get_full_script(job) -> list[str]:
script = []
for script_part in ("before_script", "script", "after_script"):
script.append(f"# {script_part}")
lines = flatten(job.get(script_part, []))
script.extend(lines)
script.append("")
return script
def recurse_among_variables_space(var_graph) -> bool:
updated = False
for var, value in var_graph.items():
value = str(value)
dep_vars = []
if match := re.findall(r"(\$[{]?[\w\d_]*[}]?)", value):
all_dep_vars = [v.lstrip("${").rstrip("}") for v in match]
# print(value, match, all_dep_vars)
dep_vars = [v for v in all_dep_vars if v in var_graph]
for dep_var in dep_vars:
dep_value = str(var_graph[dep_var])
new_value = var_graph[var]
new_value = new_value.replace(f"${{{dep_var}}}", dep_value)
new_value = new_value.replace(f"${dep_var}", dep_value)
var_graph[var] = new_value
updated |= dep_value != new_value
return updated
def print_job_final_definition(job_name, merged_yaml, project_path, sha):
job = merged_yaml[job_name]
variables = get_variables(job, merged_yaml, project_path, sha)
print("# --------- variables ---------------")
for var, value in sorted(variables.items()):
print(f"export {var}={value!r}")
# TODO: Recurse into needs to get full script
# TODO: maybe create a extra yaml file to avoid too much rework
script = get_full_script(job)
print()
print()
print("# --------- full script ---------------")
print("\n".join(script))
if image := variables.get("MESA_IMAGE"):
print()
print()
print("# --------- container image ---------------")
print(image)
def from_sha_to_pipeline_iid(gl_gql: GitlabGQL, params) -> str:
result = gl_gql.query("pipeline_utils.gql", params)
return result["project"]["pipelines"]["nodes"][0]["iid"]
def parse_args() -> Namespace:
parser = ArgumentParser(
formatter_class=ArgumentDefaultsHelpFormatter,
description="CLI and library with utility functions to debug jobs via Gitlab GraphQL",
epilog=f"""Example:
{Path(__file__).name} --print-dag""",
)
parser.add_argument("-pp", "--project-path", type=str, default="mesa/mesa")
parser.add_argument("--sha", "--rev", type=str, default='HEAD')
parser.add_argument(
"--regex",
type=str,
required=False,
help="Regex pattern for the job name to be considered",
)
mutex_group_print = parser.add_mutually_exclusive_group()
mutex_group_print.add_argument(
"--print-dag",
action="store_true",
help="Print job needs DAG",
)
mutex_group_print.add_argument(
"--print-merged-yaml",
action="store_true",
help="Print the resulting YAML for the specific SHA",
)
mutex_group_print.add_argument(
"--print-job-manifest",
metavar='JOB_NAME',
type=str,
help="Print the resulting job data"
)
parser.add_argument(
"--gitlab-token-file",
type=str,
default=get_token_from_default_dir(),
help="force GitLab token, otherwise it's read from $XDG_CONFIG_HOME/gitlab-token",
)
args = parser.parse_args()
args.gitlab_token = Path(args.gitlab_token_file).read_text().strip()
return args
def main():
args = parse_args()
gl_gql = GitlabGQL(token=args.gitlab_token)
sha = check_output(['git', 'rev-parse', args.sha]).decode('ascii').strip()
if args.print_dag:
iid = from_sha_to_pipeline_iid(gl_gql, {"projectPath": args.project_path, "sha": sha})
dag = create_job_needs_dag(
gl_gql, {"projectPath": args.project_path, "iid": iid}, disable_cache=True
)
if args.regex:
dag = filter_dag(dag, re.compile(args.regex))
print_dag(dag)
if args.print_merged_yaml or args.print_job_manifest:
merged_yaml = fetch_merged_yaml(
gl_gql, {"projectPath": args.project_path, "sha": sha}
)
if args.print_merged_yaml:
print(yaml.dump(merged_yaml, indent=2))
if args.print_job_manifest:
print_job_final_definition(
args.print_job_manifest, merged_yaml, args.project_path, sha
)
if __name__ == "__main__":
main()
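
Library-style use of gitlab_gql mirrors what main() does above; a hedged sketch, with a placeholder token, project path and pipeline iid:

# Hypothetical library use of GitlabGQL; token, project path and iid are placeholders.
import re
from gitlab_gql import GitlabGQL, create_job_needs_dag, filter_dag, print_dag

gl_gql = GitlabGQL(token="glpat-xxxxxxxx")            # or GitlabGQL() for anonymous queries
dag = create_job_needs_dag(gl_gql, {"projectPath": "mesa/mesa", "iid": 1000000})
print_dag(filter_dag(dag, re.compile(r".*zink.*")))   # keep only jobs whose name matches the regex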

10
bin/ci/gitlab_gql.sh Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env bash
set -eu
this_dir=$(dirname -- "$(readlink -f -- "${BASH_SOURCE[0]}")")
readonly this_dir
exec \
"$this_dir/../python-venv.sh" \
"$this_dir/requirements.txt" \
"$this_dir/gitlab_gql.py" "$@"

7
bin/ci/job_details.gql Normal file
@@ -0,0 +1,7 @@
query getCiConfigData($projectPath: ID!, $sha: String, $content: String!) {
ciConfig(projectPath: $projectPath, sha: $sha, content: $content) {
errors
mergedYaml
__typename
}
}

67
bin/ci/marge_queue.py Executable file
@@ -0,0 +1,67 @@
#!/usr/bin/env python3
# Copyright © 2020 - 2023 Collabora Ltd.
# Authors:
# David Heidelberg <david.heidelberg@collabora.com>
#
# SPDX-License-Identifier: MIT
"""
Monitors Marge-bot and returns the number of assigned MRs.
"""
import argparse
import time
import sys
from datetime import datetime, timezone
from dateutil import parser
import gitlab
from gitlab_common import read_token, pretty_duration
REFRESH_WAIT = 30
MARGE_BOT_USER_ID = 9716
def parse_args() -> None:
"""Parse args"""
parse = argparse.ArgumentParser(
description="Tool to show merge requests assigned to the marge-bot",
)
parse.add_argument(
"--wait", action="store_true", help="wait until CI is free",
)
parse.add_argument(
"--token",
metavar="token",
help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
)
return parse.parse_args()
if __name__ == "__main__":
args = parse_args()
token = read_token(args.token)
gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)
project = gl.projects.get("mesa/mesa")
while True:
mrs = project.mergerequests.list(assignee_id=MARGE_BOT_USER_ID, scope="all", state="opened", get_all=True)
jobs_num = len(mrs)
for mr in mrs:
updated = parser.parse(mr.updated_at)
now = datetime.now(timezone.utc)
diff = (now - updated).total_seconds()
print(
f"\u001b]8;;{mr.web_url}\u001b\\{mr.title}\u001b]8;;\u001b\\ ({pretty_duration(diff)})"
)
print("Job waiting: " + str(jobs_num))
if jobs_num == 0:
sys.exit(0)
if not args.wait:
sys.exit(min(jobs_num, 127))
time.sleep(REFRESH_WAIT)

10
bin/ci/marge_queue.sh Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env bash
set -eu
this_dir=$(dirname -- "$(readlink -f -- "${BASH_SOURCE[0]}")")
readonly this_dir
exec \
"$this_dir/../python-venv.sh" \
"$this_dir/requirements.txt" \
"$this_dir/marge_queue.py" "$@"

@@ -0,0 +1,35 @@
query jobs($projectPath: ID!, $iid: ID!, $cursor: String) {
project(fullPath: $projectPath) {
id
pipeline(iid: $iid) {
id
iid
complete
stages {
nodes {
name
}
}
jobs(after: $cursor) {
pageInfo {
hasNextPage
endCursor
}
count
nodes {
name
needs {
edges {
node {
name
}
}
}
stage {
name
}
}
}
}
}
}

@@ -0,0 +1,9 @@
query sha2pipelineIID($projectPath: ID!, $sha: String!) {
project(fullPath: $projectPath) {
pipelines(last: 1, sha:$sha){
nodes {
iid
}
}
}
}

8
bin/ci/requirements.txt Normal file
@@ -0,0 +1,8 @@
colorama==0.4.5
filecache==0.81
gql==3.4.0
python-dateutil==2.8.2
python-gitlab==3.5.0
PyYAML==6.0.1
ruamel.yaml.clib==0.2.8
ruamel.yaml==0.17.21

294
bin/ci/structured_logger.py Normal file
@@ -0,0 +1,294 @@
"""
A structured logging utility supporting multiple data formats such as CSV, JSON,
and YAML.
The main purpose of this script is to keep relevant information available in a
condensed and easily deserialized form.
This script defines a protocol for different file handling strategies and provides
implementations for CSV, JSON, and YAML formats. The main class, StructuredLogger,
allows for easy interaction with log data, enabling users to load, save, increment,
set, and append fields in the log. The script also includes context managers for
file locking and editing log data to ensure data integrity and avoid race conditions.
"""
import json
import os
from collections.abc import MutableMapping, MutableSequence
from contextlib import contextmanager
from datetime import datetime
from pathlib import Path
from typing import Any, Protocol
import fire
from filelock import FileLock
try:
import polars as pl
CSV_LIB_EXCEPTION = None
except ImportError as e:
CSV_LIB_EXCEPTION: ImportError = e
try:
from ruamel.yaml import YAML
YAML_LIB_EXCEPTION = None
except ImportError as e:
YAML_LIB_EXCEPTION: ImportError = e
class ContainerProxy:
"""
A proxy class that wraps a mutable container object (such as a dictionary or
a list) and calls a provided save_callback function whenever the container
or its contents
are changed.
"""
def __init__(self, container, save_callback):
self.container = container
self.save_callback = save_callback
def __getitem__(self, key):
value = self.container[key]
if isinstance(value, (MutableMapping, MutableSequence)):
return ContainerProxy(value, self.save_callback)
return value
def __setitem__(self, key, value):
self.container[key] = value
self.save_callback()
def __delitem__(self, key):
del self.container[key]
self.save_callback()
def __getattr__(self, name):
attr = getattr(self.container, name)
if callable(attr):
def wrapper(*args, **kwargs):
result = attr(*args, **kwargs)
self.save_callback()
return result
return wrapper
return attr
def __iter__(self):
return iter(self.container)
def __len__(self):
return len(self.container)
def __repr__(self):
return repr(self.container)
class AutoSaveDict(dict):
"""
A subclass of the built-in dict class with additional functionality to
automatically save changes to the dictionary. It maintains a timestamp of
the last modification and automatically wraps nested mutable containers
using ContainerProxy.
"""
timestamp_key = "_timestamp"
def __init__(self, *args, save_callback, register_timestamp=True, **kwargs):
self.save_callback = save_callback
self.__register_timestamp = register_timestamp
self.__heartbeat()
super().__init__(*args, **kwargs)
self.__wrap_dictionaries()
def __heartbeat(self):
if self.__register_timestamp:
self[AutoSaveDict.timestamp_key] = datetime.now().isoformat()
def __save(self):
self.__heartbeat()
self.save_callback()
def __wrap_dictionaries(self):
for key, value in self.items():
if isinstance(value, MutableMapping) and not isinstance(
value, AutoSaveDict
):
self[key] = AutoSaveDict(
value, save_callback=self.save_callback, register_timestamp=False
)
def __setitem__(self, key, value):
if isinstance(value, MutableMapping) and not isinstance(value, AutoSaveDict):
value = AutoSaveDict(
value, save_callback=self.save_callback, register_timestamp=False
)
super().__setitem__(key, value)
if self.__register_timestamp and key == AutoSaveDict.timestamp_key:
return
self.__save()
def __getitem__(self, key):
value = super().__getitem__(key)
if isinstance(value, (MutableMapping, MutableSequence)):
return ContainerProxy(value, self.__save)
return value
def __delitem__(self, key):
super().__delitem__(key)
self.__save()
def pop(self, *args, **kwargs):
result = super().pop(*args, **kwargs)
self.__save()
return result
def update(self, *args, **kwargs):
super().update(*args, **kwargs)
self.__wrap_dictionaries()
self.__save()
class StructuredLoggerStrategy(Protocol):
def load_data(self, file_path: Path) -> dict:
pass
def save_data(self, file_path: Path, data: dict) -> None:
pass
class CSVStrategy:
def __init__(self) -> None:
if CSV_LIB_EXCEPTION:
raise RuntimeError(
"Can't parse CSV files. Missing library"
) from CSV_LIB_EXCEPTION
def load_data(self, file_path: Path) -> dict:
dicts: list[dict[str, Any]] = pl.read_csv(
file_path, try_parse_dates=True
).to_dicts()
data = {}
for d in dicts:
for k, v in d.items():
if k != AutoSaveDict.timestamp_key and k in data:
if isinstance(data[k], list):
data[k].append(v)
continue
data[k] = [data[k], v]
else:
data[k] = v
return data
def save_data(self, file_path: Path, data: dict) -> None:
pl.DataFrame(data).write_csv(file_path)
class JSONStrategy:
def load_data(self, file_path: Path) -> dict:
return json.loads(file_path.read_text())
def save_data(self, file_path: Path, data: dict) -> None:
with open(file_path, "w") as f:
json.dump(data, f, indent=2)
class YAMLStrategy:
def __init__(self):
if YAML_LIB_EXCEPTION:
raise RuntimeError(
"Can't parse YAML files. Missing library"
) from YAML_LIB_EXCEPTION
self.yaml = YAML()
self.yaml.indent(sequence=4, offset=2)
self.yaml.default_flow_style = False
self.yaml.representer.add_representer(AutoSaveDict, self.represent_dict)
@classmethod
def represent_dict(cls, dumper, data):
return dumper.represent_mapping("tag:yaml.org,2002:map", data)
def load_data(self, file_path: Path) -> dict:
return self.yaml.load(file_path.read_text())
def save_data(self, file_path: Path, data: dict) -> None:
with open(file_path, "w") as f:
self.yaml.dump(data, f)
class StructuredLogger:
def __init__(
self, file_name: str, strategy: StructuredLoggerStrategy = None, truncate=False
):
self.file_name: str = file_name
self.file_path = Path(self.file_name)
self._data: AutoSaveDict = AutoSaveDict(save_callback=self.save_data)
if strategy is None:
self.strategy: StructuredLoggerStrategy = self.guess_strategy_from_file(
self.file_path
)
else:
self.strategy = strategy
if not self.file_path.exists():
Path.mkdir(self.file_path.parent, exist_ok=True)
self.save_data()
return
if truncate:
with self.get_lock():
os.truncate(self.file_path, 0)
self.save_data()
def load_data(self):
self._data = self.strategy.load_data(self.file_path)
def save_data(self):
self.strategy.save_data(self.file_path, self._data)
@property
def data(self) -> AutoSaveDict:
return self._data
@contextmanager
def get_lock(self):
with FileLock(f"{self.file_path}.lock", timeout=10):
yield
@contextmanager
def edit_context(self):
"""
Context manager that ensures proper loading and saving of log data when
performing multiple modifications.
"""
with self.get_lock():
try:
self.load_data()
yield
finally:
self.save_data()
@staticmethod
def guess_strategy_from_file(file_path: Path) -> StructuredLoggerStrategy:
file_extension = file_path.suffix.lower().lstrip(".")
return StructuredLogger.get_strategy(file_extension)
@staticmethod
def get_strategy(strategy_name: str) -> StructuredLoggerStrategy:
strategies = {
"csv": CSVStrategy,
"json": JSONStrategy,
"yaml": YAMLStrategy,
"yml": YAMLStrategy,
}
try:
return strategies[strategy_name]()
except KeyError as e:
raise ValueError(f"Unknown strategy for: {strategy_name}") from e
if __name__ == "__main__":
fire.Fire(StructuredLogger)
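
A minimal sketch of StructuredLogger as used by CustomLogger above, assuming a JSON log and that filelock/fire are installed; the file name is an example.

# Hypothetical direct use of StructuredLogger; the file name is an example.
from structured_logger import StructuredLogger

log = StructuredLogger("ci_job.json")    # strategy is guessed from the file suffix
with log.edit_context():                 # file-locked load / modify / save
    log.data["dut_jobs"] = []
    log.data["dut_jobs"].append({"status": "pass"})
print(log.data["dut_jobs"])              # -> [{'status': 'pass'}]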

@@ -0,0 +1,5 @@
filelock==3.12.4
fire==0.5.0
mock==5.1.0
polars==0.19.3
pytest==7.4.2

@@ -0,0 +1,669 @@
import logging
import subprocess
from datetime import datetime
import pytest
from custom_logger import CustomLogger
@pytest.fixture
def tmp_log_file(tmp_path):
return tmp_path / "test_log.json"
@pytest.fixture
def custom_logger(tmp_log_file):
return CustomLogger(tmp_log_file)
def run_script_with_args(args):
import custom_logger
script_path = custom_logger.__file__
return subprocess.run(
["python3", str(script_path), *args], capture_output=True, text=True
)
# Test case for missing log file
@pytest.mark.parametrize(
"key, value", [("dut_attempt_counter", "1"), ("job_combined_status", "pass")]
)
def test_missing_log_file_argument(key, value):
result = run_script_with_args(["--update", "key", "value"])
assert result.returncode != 0
# Parametrize test case for valid update arguments
@pytest.mark.parametrize(
"key, value", [("dut_attempt_counter", "1"), ("job_combined_status", "pass")]
)
def test_update_argument_valid(custom_logger, tmp_log_file, key, value):
result = run_script_with_args([str(tmp_log_file), "--update", key, value])
assert result.returncode == 0
# Test case for passing only the key without a value
def test_update_argument_key_only(custom_logger, tmp_log_file):
key = "dut_attempt_counter"
result = run_script_with_args([str(tmp_log_file), "--update", key])
assert result.returncode != 0
# Test case for not passing any key-value pair
def test_update_argument_no_values(custom_logger, tmp_log_file):
result = run_script_with_args([str(tmp_log_file), "--update"])
assert result.returncode == 0
# Parametrize test case for valid arguments
@pytest.mark.parametrize(
"key, value", [("dut_attempt_counter", "1"), ("job_combined_status", "pass")]
)
def test_create_argument_valid(custom_logger, tmp_log_file, key, value):
result = run_script_with_args([str(tmp_log_file), "--create-dut-job", key, value])
assert result.returncode == 0
# Test case for passing only the key without a value
def test_create_argument_key_only(custom_logger, tmp_log_file):
key = "dut_attempt_counter"
result = run_script_with_args([str(tmp_log_file), "--create-dut-job", key])
assert result.returncode != 0
# Test case for not passing any key-value pair
def test_create_argument_no_values(custom_logger, tmp_log_file):
result = run_script_with_args([str(tmp_log_file), "--create-dut-job"])
assert result.returncode == 0
# Test case for updating a DUT job
@pytest.mark.parametrize(
"key, value", [("status", "hung"), ("dut_state", "Canceling"), ("dut_name", "asus")]
)
def test_update_dut_job(custom_logger, tmp_log_file, key, value):
result = run_script_with_args([str(tmp_log_file), "--update-dut-job", key, value])
assert result.returncode != 0
result = run_script_with_args([str(tmp_log_file), "--create-dut-job", key, value])
assert result.returncode == 0
result = run_script_with_args([str(tmp_log_file), "--update-dut-job", key, value])
assert result.returncode == 0
# Test case for updating last DUT job
def test_update_dut_multiple_job(custom_logger, tmp_log_file):
# Create the first DUT job with the first key
result = run_script_with_args(
[str(tmp_log_file), "--create-dut-job", "status", "hung"]
)
assert result.returncode == 0
# Create the second DUT job with the second key
result = run_script_with_args(
[str(tmp_log_file), "--create-dut-job", "dut_state", "Canceling"]
)
assert result.returncode == 0
result = run_script_with_args(
[str(tmp_log_file), "--update-dut-job", "dut_name", "asus"]
)
assert result.returncode == 0
# Parametrize test case for valid phase arguments
@pytest.mark.parametrize(
"phase_name",
[("Phase1"), ("Phase2"), ("Phase3")],
)
def test_create_job_phase_valid(custom_logger, tmp_log_file, phase_name):
custom_logger.create_dut_job(status="pass")
result = run_script_with_args([str(tmp_log_file), "--create-job-phase", phase_name])
assert result.returncode == 0
# Test case for not passing any arguments for create-job-phase
def test_create_job_phase_no_arguments(custom_logger, tmp_log_file):
custom_logger.create_dut_job(status="pass")
result = run_script_with_args([str(tmp_log_file), "--create-job-phase"])
assert result.returncode != 0
# Test case for trying to create a phase job without an existing DUT job
def test_create_job_phase_no_dut_job(custom_logger, tmp_log_file):
phase_name = "Phase1"
result = run_script_with_args([str(tmp_log_file), "--create-job-phase", phase_name])
assert result.returncode != 0
# Combined test cases for valid scenarios
def test_valid_scenarios(custom_logger, tmp_log_file):
valid_update_args = [("dut_attempt_counter", "1"), ("job_combined_status", "pass")]
for key, value in valid_update_args:
result = run_script_with_args([str(tmp_log_file), "--update", key, value])
assert result.returncode == 0
valid_create_args = [
("status", "hung"),
("dut_state", "Canceling"),
("dut_name", "asus"),
("phase_name", "Bootloader"),
]
for key, value in valid_create_args:
result = run_script_with_args(
[str(tmp_log_file), "--create-dut-job", key, value]
)
assert result.returncode == 0
result = run_script_with_args(
[str(tmp_log_file), "--create-dut-job", "status", "hung"]
)
assert result.returncode == 0
result = run_script_with_args(
[str(tmp_log_file), "--update-dut-job", "dut_name", "asus"]
)
assert result.returncode == 0
result = run_script_with_args(
[
str(tmp_log_file),
"--create-job-phase",
"phase_name",
]
)
assert result.returncode == 0
# Parametrize test case for valid update arguments
@pytest.mark.parametrize(
"key, value", [("dut_attempt_counter", "1"), ("job_combined_status", "pass")]
)
def test_update(custom_logger, key, value):
custom_logger.update(**{key: value})
logger_data = custom_logger.logger.data
assert key in logger_data
assert logger_data[key] == value
# Test case for updating with a key that already exists
def test_update_existing_key(custom_logger):
key = "status"
value = "new_value"
custom_logger.logger.data[key] = "old_value"
custom_logger.update(**{key: value})
logger_data = custom_logger.logger.data
assert key in logger_data
assert logger_data[key] == value
# Test case for updating "dut_jobs"
def test_update_dut_jobs(custom_logger):
key1 = "status"
value1 = "fail"
key2 = "state"
value2 = "hung"
custom_logger.create_dut_job(**{key1: value1})
logger_data = custom_logger.logger.data
job1 = logger_data["dut_jobs"][0]
assert key1 in job1
assert job1[key1] == value1
custom_logger.update_dut_job(key2, value2)
logger_data = custom_logger.logger.data
job2 = logger_data["dut_jobs"][0]
assert key2 in job2
assert job2[key2] == value2
# Test case for creating and updating DUT job
def test_create_dut_job(custom_logger):
key = "status"
value1 = "pass"
value2 = "fail"
value3 = "hung"
reason = "job_combined_status"
result = "Finished"
custom_logger.update(**{reason: result})
logger_data = custom_logger.logger.data
assert reason in logger_data
assert logger_data[reason] == result
# Create the first DUT job
custom_logger.create_dut_job(**{key: value1})
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert isinstance(logger_data["dut_jobs"], list)
assert len(logger_data["dut_jobs"]) == 1
assert isinstance(logger_data["dut_jobs"][0], dict)
# Check the values of the keys in the created first DUT job
job1 = logger_data["dut_jobs"][0]
assert key in job1
assert job1[key] == value1
# Create the second DUT job
custom_logger.create_dut_job(**{key: value2})
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert isinstance(logger_data["dut_jobs"], list)
assert len(logger_data["dut_jobs"]) == 2
assert isinstance(logger_data["dut_jobs"][1], dict)
# Check the values of the keys in the created second DUT job
job2 = logger_data["dut_jobs"][1]
assert key in job2
assert job2[key] == value2
# Update the second DUT job with value3
custom_logger.update_dut_job(key, value3)
logger_data = custom_logger.logger.data
# Check the updated value in the second DUT job
job2 = logger_data["dut_jobs"][1]
assert key in job2
assert job2[key] == value3
# Find the index of the last DUT job
last_job_index = len(logger_data["dut_jobs"]) - 1
# Update the last DUT job
custom_logger.update_dut_job("dut_name", "asus")
logger_data = custom_logger.logger.data
# Check the updated value in the last DUT job
job2 = logger_data["dut_jobs"][last_job_index]
assert "dut_name" in job2
assert job2["dut_name"] == "asus"
# Check that "dut_name" is not present in other DUT jobs
for idx, job in enumerate(logger_data["dut_jobs"]):
if idx != last_job_index:
assert job.get("dut_name") == ""
# Test case for updating with missing "dut_jobs" key
def test_update_dut_job_missing_dut_jobs(custom_logger):
key = "status"
value = "fail"
# Attempt to update a DUT job when "dut_jobs" is missing
with pytest.raises(ValueError, match="No DUT jobs found."):
custom_logger.update_dut_job(key, value)
# Test case for creating a job phase
def test_create_job_phase(custom_logger):
custom_logger.create_dut_job(status="pass")
phase_name = "Phase1"
custom_logger.create_job_phase(phase_name)
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert isinstance(logger_data["dut_jobs"], list)
assert len(logger_data["dut_jobs"]) == 1
job = logger_data["dut_jobs"][0]
assert "dut_job_phases" in job
assert isinstance(job["dut_job_phases"], list)
assert len(job["dut_job_phases"]) == 1
phase = job["dut_job_phases"][0]
assert phase["name"] == phase_name
try:
datetime.fromisoformat(phase["start_time"])
assert True
except ValueError:
assert False
assert phase["end_time"] == ""
# Test case for creating multiple phase jobs
def test_create_multiple_phase_jobs(custom_logger):
custom_logger.create_dut_job(status="pass")
phase_data = [
{
"phase_name": "Phase1",
},
{
"phase_name": "Phase2",
},
{
"phase_name": "Phase3",
},
]
for data in phase_data:
phase_name = data["phase_name"]
custom_logger.create_job_phase(phase_name)
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert isinstance(logger_data["dut_jobs"], list)
assert len(logger_data["dut_jobs"]) == 1
job = logger_data["dut_jobs"][0]
assert "dut_job_phases" in job
assert isinstance(job["dut_job_phases"], list)
assert len(job["dut_job_phases"]) == len(phase_data)
for data in phase_data:
phase_name = data["phase_name"]
phase = job["dut_job_phases"][phase_data.index(data)]
assert phase["name"] == phase_name
try:
datetime.fromisoformat(phase["start_time"])
assert True
except ValueError:
assert False
if phase_data.index(data) != len(phase_data) - 1:
try:
datetime.fromisoformat(phase["end_time"])
assert True
except ValueError:
assert False
# Check if the end_time of the last phase is an empty string
last_phase = job["dut_job_phases"][-1]
assert last_phase["end_time"] == ""
# Test case for creating multiple dut jobs and updating phase job for last dut job
def test_create_two_dut_jobs_and_add_phase(custom_logger):
# Create the first DUT job
custom_logger.create_dut_job(status="pass")
# Create the second DUT job
custom_logger.create_dut_job(status="fail")
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert isinstance(logger_data["dut_jobs"], list)
assert len(logger_data["dut_jobs"]) == 2
first_dut_job = logger_data["dut_jobs"][0]
second_dut_job = logger_data["dut_jobs"][1]
# Add a phase to the second DUT job
custom_logger.create_job_phase("Phase1")
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert isinstance(logger_data["dut_jobs"], list)
assert len(logger_data["dut_jobs"]) == 2
first_dut_job = logger_data["dut_jobs"][0]
second_dut_job = logger_data["dut_jobs"][1]
# Check first DUT job does not have a phase
assert not first_dut_job.get("dut_job_phases")
# Check second DUT job has a phase
assert second_dut_job.get("dut_job_phases")
assert isinstance(second_dut_job["dut_job_phases"], list)
assert len(second_dut_job["dut_job_phases"]) == 1
# Test case for updating DUT start time
def test_update_dut_start_time(custom_logger):
custom_logger.create_dut_job(status="pass")
custom_logger.update_dut_time("start", None)
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
dut_job = logger_data["dut_jobs"][0]
assert "dut_start_time" in dut_job
assert dut_job["dut_start_time"] != ""
try:
datetime.fromisoformat(dut_job["dut_start_time"])
assert True
except ValueError:
assert False
# Test case for updating DUT submit time
def test_update_dut_submit_time(custom_logger):
custom_time = "2023-11-09T02:37:06Z"
custom_logger.create_dut_job(status="pass")
custom_logger.update_dut_time("submit", custom_time)
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
dut_job = logger_data["dut_jobs"][0]
assert "dut_submit_time" in dut_job
try:
datetime.fromisoformat(dut_job["dut_submit_time"])
assert True
except ValueError:
assert False
# Test case for updating DUT end time
def test_update_dut_end_time(custom_logger):
custom_logger.create_dut_job(status="pass")
custom_logger.update_dut_time("end", None)
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
dut_job = logger_data["dut_jobs"][0]
assert "dut_end_time" in dut_job
try:
datetime.fromisoformat(dut_job["dut_end_time"])
assert True
except ValueError:
assert False
# Test case for updating DUT time with invalid value
def test_update_dut_time_invalid_value(custom_logger):
custom_logger.create_dut_job(status="pass")
with pytest.raises(
ValueError,
match="Error: Invalid argument provided for --update-dut-time. Use 'start', 'submit', 'end'.",
):
custom_logger.update_dut_time("invalid_value", None)
# Test case for close_dut_job
def test_close_dut_job(custom_logger):
custom_logger.create_dut_job(status="pass")
custom_logger.create_job_phase("Phase1")
custom_logger.create_job_phase("Phase2")
custom_logger.close_dut_job()
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
dut_job = logger_data["dut_jobs"][0]
assert "dut_job_phases" in dut_job
dut_job_phases = dut_job["dut_job_phases"]
phase1 = dut_job_phases[0]
assert phase1["name"] == "Phase1"
try:
datetime.fromisoformat(phase1["start_time"])
assert True
except ValueError:
assert False
try:
datetime.fromisoformat(phase1["end_time"])
assert True
except ValueError:
assert False
phase2 = dut_job_phases[1]
assert phase2["name"] == "Phase2"
try:
datetime.fromisoformat(phase2["start_time"])
assert True
except ValueError:
assert False
try:
datetime.fromisoformat(phase2["end_time"])
assert True
except ValueError:
assert False
# Test case for close
def test_close(custom_logger):
custom_logger.create_dut_job(status="pass")
custom_logger.close()
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
assert "dut_attempt_counter" in logger_data
assert logger_data["dut_attempt_counter"] == len(logger_data["dut_jobs"])
assert "job_combined_status" in logger_data
assert logger_data["job_combined_status"] != ""
dut_job = logger_data["dut_jobs"][0]
assert "submitter_end_time" in dut_job
try:
datetime.fromisoformat(dut_job["submitter_end_time"])
assert True
except ValueError:
assert False
# Test case for updating status to fail with a reason
def test_update_status_fail_with_reason(custom_logger):
custom_logger.create_dut_job()
reason = "kernel panic"
custom_logger.update_status_fail(reason)
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
dut_job = logger_data["dut_jobs"][0]
assert "status" in dut_job
assert dut_job["status"] == "fail"
assert "dut_job_fail_reason" in dut_job
assert dut_job["dut_job_fail_reason"] == reason
# Test case for updating status to fail without providing a reason
def test_update_status_fail_without_reason(custom_logger):
custom_logger.create_dut_job()
custom_logger.update_status_fail()
# Check if the status is updated and fail reason is empty
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
dut_job = logger_data["dut_jobs"][0]
assert "status" in dut_job
assert dut_job["status"] == "fail"
assert "dut_job_fail_reason" in dut_job
assert dut_job["dut_job_fail_reason"] == ""
# Test case for check_dut_timings with submission time earlier than start time
def test_check_dut_timings_submission_earlier_than_start(custom_logger, caplog):
custom_logger.create_dut_job()
# Set submission time to be earlier than start time
custom_logger.update_dut_time("start", "2023-01-01T11:00:00")
custom_logger.update_dut_time("submit", "2023-01-01T12:00:00")
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
job = logger_data["dut_jobs"][0]
# Call check_dut_timings
custom_logger.check_dut_timings(job)
# Check if an error message is logged
assert "Job submission is happening before job start." in caplog.text
# Test case for check_dut_timings with end time earlier than start time
def test_check_dut_timings_end_earlier_than_start(custom_logger, caplog):
custom_logger.create_dut_job()
# Set end time to be earlier than start time
custom_logger.update_dut_time("end", "2023-01-01T11:00:00")
custom_logger.update_dut_time("start", "2023-01-01T12:00:00")
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
job = logger_data["dut_jobs"][0]
# Call check_dut_timings
custom_logger.check_dut_timings(job)
# Check if an error message is logged
assert "Job ended before it started." in caplog.text
# Test case for check_dut_timings with valid timing sequence
def test_check_dut_timings_valid_timing_sequence(custom_logger, caplog):
custom_logger.create_dut_job()
# Set valid timing sequence
custom_logger.update_dut_time("submit", "2023-01-01T12:00:00")
custom_logger.update_dut_time("start", "2023-01-01T12:30:00")
custom_logger.update_dut_time("end", "2023-01-01T13:00:00")
logger_data = custom_logger.logger.data
assert "dut_jobs" in logger_data
assert len(logger_data["dut_jobs"]) == 1
job = logger_data["dut_jobs"][0]
# Call check_dut_timings
custom_logger.check_dut_timings(job)
# Check that no error messages are logged
assert "Job submission is happening before job start." not in caplog.text
assert "Job ended before it started." not in caplog.text


@ -0,0 +1,182 @@
import json
from pathlib import Path
import pytest
from mock import MagicMock, patch
from structured_logger import (
AutoSaveDict,
CSVStrategy,
JSONStrategy,
StructuredLogger,
YAMLStrategy,
)
@pytest.fixture(params=[CSVStrategy, JSONStrategy, YAMLStrategy])
def strategy(request):
return request.param
@pytest.fixture
def file_extension(strategy):
if strategy == CSVStrategy:
return "csv"
elif strategy == JSONStrategy:
return "json"
elif strategy == YAMLStrategy:
return "yaml"
@pytest.fixture
def tmp_file(tmp_path):
return tmp_path / "test.json"
def test_guess_strategy_from_file(tmp_path, strategy, file_extension):
file_name = tmp_path / f"test_guess.{file_extension}"
Path(file_name).touch()
guessed_strategy = StructuredLogger.guess_strategy_from_file(file_name)
assert isinstance(guessed_strategy, strategy)
def test_get_strategy(strategy, file_extension):
result = StructuredLogger.get_strategy(file_extension)
assert isinstance(result, strategy)
def test_invalid_file_extension(tmp_path):
file_name = tmp_path / "test_invalid.xyz"
Path(file_name).touch()
with pytest.raises(ValueError, match="Unknown strategy for: xyz"):
StructuredLogger.guess_strategy_from_file(file_name)
def test_non_existent_file(tmp_path, strategy, file_extension):
file_name = tmp_path / f"non_existent.{file_extension}"
logger = StructuredLogger(file_name, strategy())
assert logger.file_path.exists()
assert "_timestamp" in logger._data
@pytest.fixture
def structured_logger_module():
with patch.dict("sys.modules", {"polars": None, "ruamel.yaml": None}):
import importlib
import structured_logger
importlib.reload(structured_logger)
yield structured_logger
def test_missing_csv_library(tmp_path, structured_logger_module):
with pytest.raises(RuntimeError, match="Can't parse CSV files. Missing library"):
structured_logger_module.CSVStrategy()
def test_missing_yaml_library(tmp_path, structured_logger_module):
with pytest.raises(RuntimeError, match="Can't parse YAML files. Missing library"):
structured_logger_module.YAMLStrategy()
def test_autosavedict_setitem():
save_callback = MagicMock()
d = AutoSaveDict(save_callback=save_callback)
d["key"] = "value"
assert d["key"] == "value"
save_callback.assert_called_once()
def test_autosavedict_delitem():
save_callback = MagicMock()
d = AutoSaveDict({"key": "value"}, save_callback=save_callback)
del d["key"]
assert "key" not in d
save_callback.assert_called_once()
def test_autosavedict_pop():
save_callback = MagicMock()
d = AutoSaveDict({"key": "value"}, save_callback=save_callback)
result = d.pop("key")
assert result == "value"
assert "key" not in d
save_callback.assert_called_once()
def test_autosavedict_update():
save_callback = MagicMock()
d = AutoSaveDict({"key": "old_value"}, save_callback=save_callback)
d.update({"key": "new_value"})
assert d["key"] == "new_value"
save_callback.assert_called_once()
def test_structured_logger_setitem(tmp_file):
logger = StructuredLogger(tmp_file, JSONStrategy())
logger.data["field"] = "value"
with open(tmp_file, "r") as f:
data = json.load(f)
assert data["field"] == "value"
def test_structured_logger_set_recursive(tmp_file):
logger = StructuredLogger(tmp_file, JSONStrategy())
logger.data["field"] = {"test": True}
other = logger.data["field"]
other["late"] = True
with open(tmp_file, "r") as f:
data = json.load(f)
assert data["field"]["test"]
assert data["field"]["late"]
def test_structured_logger_set_list(tmp_file):
logger = StructuredLogger(tmp_file, JSONStrategy())
logger.data["field"] = [True]
other = logger.data["field"]
other.append(True)
with open(tmp_file, "r") as f:
data = json.load(f)
assert data["field"][0]
assert data["field"][1]
def test_structured_logger_delitem(tmp_file):
logger = StructuredLogger(tmp_file, JSONStrategy())
logger.data["field"] = "value"
del logger.data["field"]
with open(tmp_file, "r") as f:
data = json.load(f)
assert "field" not in data
def test_structured_logger_pop(tmp_file):
logger = StructuredLogger(tmp_file, JSONStrategy())
logger.data["field"] = "value"
logger.data.pop("field")
with open(tmp_file, "r") as f:
data = json.load(f)
assert "field" not in data
def test_structured_logger_update(tmp_file):
logger = StructuredLogger(tmp_file, JSONStrategy())
logger.data.update({"field": "value"})
with open(tmp_file, "r") as f:
data = json.load(f)
assert data["field"] == "value"

bin/ci/update_traces_checksum.py Executable file

@ -0,0 +1,143 @@
#!/usr/bin/env python3
# Copyright © 2022 Collabora Ltd.
# Authors:
# David Heidelberg <david.heidelberg@collabora.com>
#
# For the dependencies, see the requirements.txt
# SPDX-License-Identifier: MIT
"""
Helper script to update traces checksums
"""
import argparse
import bz2
import glob
import re
import json
import sys
from ruamel.yaml import YAML
import gitlab
from colorama import Fore, Style
from gitlab_common import get_gitlab_project, read_token, wait_for_pipeline
DESCRIPTION_FILE = "export PIGLIT_REPLAY_DESCRIPTION_FILE='.*/install/(.*)'$"
DEVICE_NAME = "export PIGLIT_REPLAY_DEVICE_NAME='(.*)'$"
def gather_results(
project,
pipeline,
) -> None:
"""Gather results"""
target_jobs_regex = re.compile(".*-traces([:].*)?$")
for job in pipeline.jobs.list(all=True, sort="desc"):
if target_jobs_regex.match(job.name) and job.status == "failed":
cur_job = project.jobs.get(job.id)
# get variables
print(f"👁 {job.name}...")
log: list[str] = cur_job.trace().decode("unicode_escape").splitlines()
filename: str = ''
dev_name: str = ''
for logline in log:
desc_file = re.search(DESCRIPTION_FILE, logline)
device_name = re.search(DEVICE_NAME, logline)
if desc_file:
filename = desc_file.group(1)
if device_name:
dev_name = device_name.group(1)
if not filename or not dev_name:
print(Fore.RED + "Couldn't find device name or YML file in the logs!" + Style.RESET_ALL)
return
print(f"👁 Found {dev_name} and file {filename}")
# find filename in Mesa source
traces_file = glob.glob('./**/' + filename, recursive=True)
# write into it
with open(traces_file[0], 'r', encoding='utf-8') as target_file:
yaml = YAML()
yaml.compact(seq_seq=False, seq_map=False)
yaml.version = 1,2
yaml.width = 2048 # do not break the text fields
yaml.default_flow_style = None
target = yaml.load(target_file)
# parse artifact
results_json_bz2 = cur_job.artifact(path="results/results.json.bz2", streamed=False)
results_json = bz2.decompress(results_json_bz2).decode("utf-8", errors="replace")
results = json.loads(results_json)
for _, value in results["tests"].items():
if (
not value['images'] or
not value['images'][0] or
"image_desc" not in value['images'][0]
):
continue
trace: str = value['images'][0]['image_desc']
checksum: str = value['images'][0]['checksum_render']
if not checksum:
print(Fore.RED + f"{dev_name}: {trace}: checksum is missing! Crash?" + Style.RESET_ALL)
continue
if checksum == "error":
print(Fore.RED + f"{dev_name}: {trace}: crashed" + Style.RESET_ALL)
continue
if target['traces'][trace][dev_name].get('checksum') == checksum:
continue
if "label" in target['traces'][trace][dev_name]:
print(f'{dev_name}: {trace}: please verify that label {Fore.BLUE}{target["traces"][trace][dev_name]["label"]}{Style.RESET_ALL} is still valid')
print(Fore.GREEN + f'{dev_name}: {trace}: checksum updated' + Style.RESET_ALL)
target['traces'][trace][dev_name]['checksum'] = checksum
with open(traces_file[0], 'w', encoding='utf-8') as target_file:
yaml.dump(target, target_file)
def parse_args() -> argparse.Namespace:
"""Parse args"""
parser = argparse.ArgumentParser(
description="Tool to generate patch from checksums ",
epilog="Example: update_traces_checksum.py --rev $(git rev-parse HEAD) "
)
parser.add_argument(
"--rev", metavar="revision", help="repository git revision", required=True
)
parser.add_argument(
"--token",
metavar="token",
help="force GitLab token, otherwise it's read from ~/.config/gitlab-token",
)
return parser.parse_args()
if __name__ == "__main__":
try:
args = parse_args()
token = read_token(args.token)
gl = gitlab.Gitlab(url="https://gitlab.freedesktop.org", private_token=token)
cur_project = get_gitlab_project(gl, "mesa")
print(f"Revision: {args.rev}")
(pipe, cur_project) = wait_for_pipeline([cur_project], args.rev)
print(f"Pipeline: {pipe.web_url}")
gather_results(cur_project, pipe)
sys.exit()
except KeyboardInterrupt:
sys.exit(1)
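
For reference, the two regexes at the top of this script pull the trace description file and device name out of the job trace. A small self-contained check, with the two log lines invented but shaped like the exports the script scans for:

import re

DESCRIPTION_FILE = "export PIGLIT_REPLAY_DESCRIPTION_FILE='.*/install/(.*)'$"
DEVICE_NAME = "export PIGLIT_REPLAY_DEVICE_NAME='(.*)'$"

desc = "export PIGLIT_REPLAY_DESCRIPTION_FILE='/builds/mesa/install/gallium-traces.yml'"
dev = "export PIGLIT_REPLAY_DEVICE_NAME='gl-vmware-llvmpipe'"

assert re.search(DESCRIPTION_FILE, desc).group(1) == "gallium-traces.yml"
assert re.search(DEVICE_NAME, dev).group(1) == "gl-vmware-llvmpipe"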


@ -0,0 +1,10 @@
#!/usr/bin/env bash
set -eu
this_dir=$(dirname -- "$(readlink -f -- "${BASH_SOURCE[0]}")")
readonly this_dir
exec \
"$this_dir/../python-venv.sh" \
"$this_dir/requirements.txt" \
"$this_dir/update_traces_checksum.py" "$@"


@ -45,24 +45,26 @@ def is_commit_valid(commit: str) -> bool:
return ret == 0
def branch_has_commit(upstream: str, branch: str, commit: str) -> bool:
def branch_has_commit(upstream_branch: str, commit: str) -> bool:
"""
Returns True if the commit is actually present in the branch
"""
ret = subprocess.call(['git', 'merge-base', '--is-ancestor',
commit, upstream + '/' + branch],
commit, upstream_branch],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
return ret == 0
def branch_has_backport_of_commit(upstream: str, branch: str, commit: str) -> str:
def branch_has_backport_of_commit(upstream_branch: str, commit: str) -> str:
"""
Returns the commit hash if the commit has been backported to the branch,
or an empty string if it hasn't
"""
upstream, _ = upstream_branch.split('/', 1)
out = subprocess.check_output(['git', 'log', '--format=%H',
upstream + '..' + upstream + '/' + branch,
upstream + '..' + upstream_branch,
'--grep', 'cherry picked from commit ' + commit],
stderr=subprocess.DEVNULL)
return out.decode().strip()
@ -125,17 +127,15 @@ if __name__ == "__main__":
help='colorize output (default: true if stdout is a terminal)')
args = parser.parse_args()
upstream, branch = args.branch.split('/', 1)
if branch_has_commit(upstream, branch, args.commit):
print_(args, True, 'Commit ' + args.commit + ' is in branch ' + branch)
if branch_has_commit(args.branch, args.commit):
print_(args, True, 'Commit ' + args.commit + ' is in branch ' + args.branch)
exit(0)
backport = branch_has_backport_of_commit(upstream, branch, args.commit)
backport = branch_has_backport_of_commit(args.branch, args.commit)
if backport:
print_(args, True,
'Commit ' + args.commit + ' was backported to branch ' + branch + ' as commit ' + backport)
'Commit ' + args.commit + ' was backported to branch ' + args.branch + ' as commit ' + backport)
exit(0)
print_(args, False, 'Commit ' + args.commit + ' is NOT in branch ' + branch)
print_(args, False, 'Commit ' + args.commit + ' is NOT in branch ' + args.branch)
exit(1)
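
In plain git terms, the two reworked helpers boil down to an ancestry check and a grep over the branch-only history. A standalone sketch, with "origin/20.1" as an example branch argument and a sha borrowed from the test table below:

import subprocess

upstream_branch = "origin/20.1"
commit = "d043d24654c851f0be57dbbf48274b5373dea42b"

# branch_has_commit: is the commit an ancestor of the upstream branch?
in_branch = subprocess.call(["git", "merge-base", "--is-ancestor",
                             commit, upstream_branch]) == 0

# branch_has_backport_of_commit: search the branch-only commits for the
# "cherry picked from commit <sha>" trailer added by `git cherry-pick -x`
upstream = upstream_branch.split("/", 1)[0]
backport = subprocess.check_output(
    ["git", "log", "--format=%H", f"{upstream}..{upstream_branch}",
     "--grep", "cherry picked from commit " + commit]).decode().strip()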


@ -88,33 +88,31 @@ def test_is_commit_valid(commit: str, expected: bool) -> None:
@pytest.mark.parametrize(
'branch, commit, expected',
[
('20.1', '20.1-branchpoint', True),
('20.1', '20.0', False),
('20.1', 'main', False),
('20.1', 'e58a10af640ba58b6001f5c5ad750b782547da76', True),
('20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', True),
('staging/20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', True),
('20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', False),
('main', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', True),
('20.0', 'd043d24654c851f0be57dbbf48274b5373dea42b', False),
(get_upstream() + '/20.1', '20.1-branchpoint', True),
(get_upstream() + '/20.1', '20.0', False),
(get_upstream() + '/20.1', 'main', False),
(get_upstream() + '/20.1', 'e58a10af640ba58b6001f5c5ad750b782547da76', True),
(get_upstream() + '/20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', True),
(get_upstream() + '/staging/20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', True),
(get_upstream() + '/20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', False),
(get_upstream() + '/main', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', True),
(get_upstream() + '/20.0', 'd043d24654c851f0be57dbbf48274b5373dea42b', False),
])
def test_branch_has_commit(branch: str, commit: str, expected: bool) -> None:
upstream = get_upstream()
assert branch_has_commit(upstream, branch, commit) == expected
assert branch_has_commit(branch, commit) == expected
@pytest.mark.parametrize(
'branch, commit, expected',
[
('20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', 'd043d24654c851f0be57dbbf48274b5373dea42b'),
('staging/20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', 'd043d24654c851f0be57dbbf48274b5373dea42b'),
('20.1', '20.1-branchpoint', ''),
('20.1', '20.0', ''),
('20.1', '20.2', ''),
('20.1', 'main', ''),
('20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', ''),
('20.0', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', ''),
(get_upstream() + '/20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', 'd043d24654c851f0be57dbbf48274b5373dea42b'),
(get_upstream() + '/staging/20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', 'd043d24654c851f0be57dbbf48274b5373dea42b'),
(get_upstream() + '/20.1', '20.1-branchpoint', ''),
(get_upstream() + '/20.1', '20.0', ''),
(get_upstream() + '/20.1', '20.2', 'abac4859618e02aea00f705b841a7c5c5007ad1a'),
(get_upstream() + '/20.1', 'main', ''),
(get_upstream() + '/20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', ''),
(get_upstream() + '/20.0', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', '8cd4f57381cefe69019a3282d457d5bda3644030'),
])
def test_branch_has_backport_of_commit(branch: str, commit: str, expected: bool) -> None:
upstream = get_upstream()
assert branch_has_backport_of_commit(upstream, branch, commit) == expected
assert branch_has_backport_of_commit(branch, commit) == expected


@ -78,9 +78,9 @@ def commit(message: str) -> None:
def _calculate_release_start(major: str, minor: str) -> datetime.date:
"""Calclulate the start of the release for release candidates.
"""Calculate the start of the release for release candidates.
This is quarterly, on the second wednesday, in Januray, April, July, and Octobor.
This is quarterly, on the second wednesday, in January, April, July, and October.
"""
quarter = datetime.date.fromisoformat(f'20{major}-0{[1, 4, 7, 10][int(minor)]}-01')
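
As a worked example of the line above: for version 24.1 the quarter start is 2024-04-01, and the second Wednesday named in the docstring falls on 2024-04-10. The second_wednesday helper here is only an illustration, not code from the script:

import datetime

major, minor = "24", "1"
quarter = datetime.date.fromisoformat(f'20{major}-0{[1, 4, 7, 10][int(minor)]}-01')

def second_wednesday(start: datetime.date) -> datetime.date:
    first = start + datetime.timedelta(days=(2 - start.weekday()) % 7)
    return first + datetime.timedelta(weeks=1)

assert quarter == datetime.date(2024, 4, 1)
assert second_wednesday(quarter) == datetime.date(2024, 4, 10)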


@ -52,7 +52,7 @@ def mock_csv(data: typing.List[gen_calendar_entries.CalendarRowType]) -> typing.
@pytest.fixture(autouse=True, scope='module')
def disable_git_commits() -> None:
"""Mock out the commit function so no git commits are made durring testing."""
"""Mock out the commit function so no git commits are made during testing."""
with mock.patch('bin.gen_calendar_entries.commit', mock.Mock()):
yield


@ -168,6 +168,7 @@ class Inliner(states.Inliner):
break
# Quote all original backslashes
checked = re.sub('\x00', "\\\x00", checked)
checked = re.sub('@', '\\@', checked)
return docutils.utils.unescape(checked, 1)
inliner = Inliner();
@ -217,7 +218,10 @@ async def parse_issues(commits: str) -> typing.List[str]:
async def gather_bugs(version: str) -> typing.List[str]:
commits = await gather_commits(version)
issues = await parse_issues(commits)
if commits:
issues = await parse_issues(commits)
else:
issues = []
loop = asyncio.get_event_loop()
async with aiohttp.ClientSession(loop=loop) as session:
@ -276,7 +280,7 @@ def calculate_next_version(version: str, is_point: bool) -> str:
def calculate_previous_version(version: str, is_point: bool) -> str:
"""Calculate the previous version to compare to.
In the case of -rc to final that verison is the previous .0 release,
In the case of -rc to final that version is the previous .0 release,
(19.3.0 in the case of 20.0.0, for example). for point releases that is
the last point release. This value will be the same as the input value
for a point release, but different for a major release.
@ -295,7 +299,7 @@ def calculate_previous_version(version: str, is_point: bool) -> str:
def get_features(is_point_release: bool) -> typing.Generator[str, None, None]:
p = pathlib.Path(__file__).parent.parent / 'docs' / 'relnotes' / 'new_features.txt'
p = pathlib.Path('docs') / 'relnotes' / 'new_features.txt'
if p.exists() and p.stat().st_size > 0:
if is_point_release:
print("WARNING: new features being introduced in a point release", file=sys.stderr)
@ -303,6 +307,7 @@ def get_features(is_point_release: bool) -> typing.Generator[str, None, None]:
for line in f:
yield line.rstrip()
p.unlink()
subprocess.run(['git', 'add', p])
else:
yield "None"
@ -320,12 +325,13 @@ def update_release_notes_index(version: str) -> None:
if first_list and line.startswith('-'):
first_list = False
new_relnotes.append(f'- :doc:`{version} release notes <relnotes/{version}>`\n')
if not first_list and second_list and line.startswith(' relnotes/'):
if (not first_list and second_list and
re.match(r' \d+.\d+(.\d+)? <relnotes/\d+.\d+(.\d+)?>', line)):
second_list = False
new_relnotes.append(f' relnotes/{version}\n')
new_relnotes.append(f' {version} <relnotes/{version}>\n')
new_relnotes.append(line)
with relnotes_index_path.open('w') as f:
with relnotes_index_path.open('w', encoding='utf-8') as f:
for line in new_relnotes:
f.write(line)
@ -333,7 +339,7 @@ def update_release_notes_index(version: str) -> None:
async def main() -> None:
v = pathlib.Path(__file__).parent.parent / 'VERSION'
v = pathlib.Path('VERSION')
with v.open('rt') as f:
raw_version = f.read().strip()
is_point_release = '-rc' not in raw_version
@ -350,8 +356,8 @@ async def main() -> None:
gather_bugs(previous_version),
)
final = pathlib.Path(__file__).parent.parent / 'docs' / 'relnotes' / f'{this_version}.rst'
with final.open('wt') as f:
final = pathlib.Path('docs') / 'relnotes' / f'{this_version}.rst'
with final.open('wt', encoding='utf-8') as f:
try:
f.write(TEMPLATE.render(
bugfix=is_point_release,
@ -368,6 +374,7 @@ async def main() -> None:
))
except:
print(exceptions.text_error_template().render())
return
subprocess.run(['git', 'add', final])


@ -76,7 +76,7 @@ async def test_gather_commits():
'content, bugs',
[
# It is important to have the title on a new line, as
# textwrap.dedent wont work otherwise.
# textwrap.dedent won't work otherwise.
# Test the `Closes: #N` syntax
(
@ -113,7 +113,7 @@ async def test_gather_commits():
'''\
A commit for for something else completely
Closes: https://github.com/Organiztion/project/1234
Closes: https://github.com/Organization/project/1234
''',
[],
),
@ -198,3 +198,8 @@ async def test_parse_issues(content: str, bugs: typing.List[str]) -> None:
mock.patch('bin.gen_release_notes.gather_commits', mock.AsyncMock(return_value='sha\n')):
ids = await parse_issues('1234 not used')
assert set(ids) == set(bugs)
@pytest.mark.asyncio
async def test_rst_escape():
out = inliner.quoteInline('foo@bar')
assert out == 'foo\@bar'


@ -89,8 +89,8 @@ python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/lavapipe/vulkan
'''
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=gen_help)
parser.add_argument('--in_file', help='input template moudle definition file')
parser.add_argument('--out_file', help='output moudle definition file')
parser.add_argument('--in_file', help='input template module definition file')
parser.add_argument('--out_file', help='output module definition file')
parser.add_argument('--compiler_abi', help='compiler abi')
parser.add_argument('--compiler_id', help='compiler id')
parser.add_argument('--cpu_family', help='cpu family')


@ -118,35 +118,36 @@ SOURCES = [
'api': 'opencl',
'inc_folder': 'CL',
'sources': [
Source('include/CL/opencl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/opencl.h'),
Source('include/CL/cl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl.h'),
Source('include/CL/cl_platform.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_platform.h'),
Source('include/CL/cl_gl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_gl.h'),
Source('include/CL/cl_gl_ext.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_gl_ext.h'),
Source('include/CL/cl_ext.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_ext.h'),
Source('include/CL/cl_version.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_version.h'),
Source('include/CL/cl_icd.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_icd.h'),
Source('include/CL/cl_egl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_egl.h'),
Source('include/CL/cl_d3d10.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_d3d10.h'),
Source('include/CL/cl_d3d11.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_d3d11.h'),
Source('include/CL/cl_dx9_media_sharing.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_dx9_media_sharing.h'),
Source('include/CL/cl_dx9_media_sharing_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_dx9_media_sharing_intel.h'),
Source('include/CL/cl_ext_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_ext_intel.h'),
Source('include/CL/cl_va_api_media_sharing_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_va_api_media_sharing_intel.h'),
Source('include/CL/opencl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/opencl.h'),
Source('include/CL/cl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl.h'),
Source('include/CL/cl_platform.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_platform.h'),
Source('include/CL/cl_gl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_gl.h'),
Source('include/CL/cl_gl_ext.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_gl_ext.h'),
Source('include/CL/cl_ext.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_ext.h'),
Source('include/CL/cl_version.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_version.h'),
Source('include/CL/cl_icd.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_icd.h'),
Source('include/CL/cl_egl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_egl.h'),
Source('include/CL/cl_d3d10.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_d3d10.h'),
Source('include/CL/cl_d3d11.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_d3d11.h'),
Source('include/CL/cl_dx9_media_sharing.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_dx9_media_sharing.h'),
Source('include/CL/cl_dx9_media_sharing_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_dx9_media_sharing_intel.h'),
Source('include/CL/cl_ext_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_ext_intel.h'),
Source('include/CL/cl_va_api_media_sharing_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/main/CL/cl_va_api_media_sharing_intel.h'),
Source('include/CL/cl.hpp', 'https://github.com/KhronosGroup/OpenCL-CLHPP/raw/master/include/CL/cl.hpp'),
Source('include/CL/cl2.hpp', 'https://github.com/KhronosGroup/OpenCL-CLHPP/raw/master/include/CL/cl2.hpp'),
Source('include/CL/cl.hpp', 'https://github.com/KhronosGroup/OpenCL-CLHPP/raw/5f3cc41df821a3e5988490232082a3e3b82c0283/include/CL/cl.hpp'),
Source('include/CL/cl2.hpp', 'https://github.com/KhronosGroup/OpenCL-CLHPP/raw/main/include/CL/cl2.hpp'),
Source('include/CL/opencl.hpp', 'https://github.com/KhronosGroup/OpenCL-CLHPP/raw/main/include/CL/opencl.hpp'),
],
},
{
'api': 'spirv',
'sources': [
Source('src/compiler/spirv/spirv.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/master/include/spirv/unified1/spirv.h'),
Source('src/compiler/spirv/spirv.core.grammar.json', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/master/include/spirv/unified1/spirv.core.grammar.json'),
Source('src/compiler/spirv/OpenCL.std.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/master/include/spirv/unified1/OpenCL.std.h'),
Source('src/compiler/spirv/GLSL.std.450.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/master/include/spirv/unified1/GLSL.std.450.h'),
Source('src/compiler/spirv/GLSL.ext.AMD.h', 'https://github.com/KhronosGroup/glslang/raw/master/SPIRV/GLSL.ext.AMD.h'), # FIXME: is this the canonical source?
Source('src/compiler/spirv/spirv.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/main/include/spirv/unified1/spirv.h'),
Source('src/compiler/spirv/spirv.core.grammar.json', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/main/include/spirv/unified1/spirv.core.grammar.json'),
Source('src/compiler/spirv/OpenCL.std.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/main/include/spirv/unified1/OpenCL.std.h'),
Source('src/compiler/spirv/GLSL.std.450.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/main/include/spirv/unified1/GLSL.std.450.h'),
Source('src/compiler/spirv/GLSL.ext.AMD.h', 'https://github.com/KhronosGroup/glslang/raw/main/SPIRV/GLSL.ext.AMD.h'), # FIXME: is this the canonical source?
],
},


@ -1,63 +0,0 @@
#!/usr/bin/env python3
from os import get_terminal_size
from textwrap import wrap
from mesonbuild import coredata
from mesonbuild import optinterpreter
(COLUMNS, _) = get_terminal_size()
def describe_option(option_name: str, option_default_value: str,
option_type: str, option_message: str) -> None:
print('name: ' + option_name)
print('default: ' + option_default_value)
print('type: ' + option_type)
for line in wrap(option_message, width=COLUMNS - 9):
print(' ' + line)
print('---')
oi = optinterpreter.OptionInterpreter('')
oi.process('meson_options.txt')
for (name, value) in oi.options.items():
if isinstance(value, coredata.UserStringOption):
describe_option(name,
value.value,
'string',
"You can type what you want, but make sure it makes sense")
elif isinstance(value, coredata.UserBooleanOption):
describe_option(name,
'true' if value.value else 'false',
'boolean',
"You can set it to 'true' or 'false'")
elif isinstance(value, coredata.UserIntegerOption):
describe_option(name,
str(value.value),
'integer',
"You can set it to any integer value between '{}' and '{}'".format(value.min_value, value.max_value))
elif isinstance(value, coredata.UserUmaskOption):
describe_option(name,
str(value.value),
'umask',
"You can set it to 'preserve' or a value between '0000' and '0777'")
elif isinstance(value, coredata.UserComboOption):
choices = '[' + ', '.join(["'" + v + "'" for v in value.choices]) + ']'
describe_option(name,
value.value,
'combo',
"You can set it to any one of those values: " + choices)
elif isinstance(value, coredata.UserArrayOption):
choices = '[' + ', '.join(["'" + v + "'" for v in value.choices]) + ']'
value = '[' + ', '.join(["'" + v + "'" for v in value.value]) + ']'
describe_option(name,
value,
'array',
"You can set it to one or more of those values: " + choices)
elif isinstance(value, coredata.UserFeatureOption):
describe_option(name,
value.value,
'feature',
"You can set it to 'auto', 'enabled', or 'disabled'")
else:
print(name + ' is an option of a type unknown to this script')
print('---')


@ -25,7 +25,7 @@
"""Perf annotate for JIT code.
Linux `perf annotate` does not work with JIT code. This script takes the data
produced by `perf script` command, plus the diassemblies outputed by gallivm
produced by `perf script` command, plus the diassemblies outputted by gallivm
into /tmp/perf-XXXXX.map.asm and produces output similar to `perf annotate`.
See docs/llvmpipe.rst for usage instructions.


@ -27,7 +27,7 @@ from pick.ui import UI, PALETTE
if __name__ == "__main__":
u = UI()
evl = urwid.AsyncioEventLoop(loop=asyncio.get_event_loop())
evl = urwid.AsyncioEventLoop(loop=asyncio.new_event_loop())
loop = urwid.MainLoop(u.render(), PALETTE, event_loop=evl, handle_mouse=False)
u.mainloop = loop
loop.run()

bin/pick-ui.sh Executable file

@ -0,0 +1,10 @@
#!/usr/bin/env bash
set -eu
this_dir=$(dirname -- "$(readlink -f -- "${BASH_SOURCE[0]}")")
readonly this_dir
exec \
"$this_dir/python-venv.sh" \
"$this_dir/pick/requirements.txt" \
"$this_dir/pick-ui.py" "$@"


@ -40,16 +40,19 @@ if typing.TYPE_CHECKING:
sha: str
description: str
nominated: bool
nomination_type: typing.Optional[int]
nomination_type: int
resolution: typing.Optional[int]
main_sha: typing.Optional[str]
because_sha: typing.Optional[str]
notes: typing.Optional[str] = attr.ib(None)
IS_FIX = re.compile(r'^\s*fixes:\s*([a-f0-9]{6,40})', flags=re.MULTILINE | re.IGNORECASE)
# FIXME: I dislike the duplication in this regex, but I couldn't get it to work otherwise
IS_CC = re.compile(r'^\s*cc:\s*["\']?([0-9]{2}\.[0-9])?["\']?\s*["\']?([0-9]{2}\.[0-9])?["\']?\s*\<?mesa-stable',
flags=re.MULTILINE | re.IGNORECASE)
IS_REVERT = re.compile(r'This reverts commit ([0-9a-f]{40})')
IS_BACKPORT = re.compile(r'^\s*backport-to:\s*(\d{2}\.\d),?\s*(\d{2}\.\d)?',
flags=re.MULTILINE | re.IGNORECASE)
# XXX: hack
SEM = asyncio.Semaphore(50)
@ -71,6 +74,8 @@ class NominationType(enum.Enum):
CC = 0
FIXES = 1
REVERT = 2
NONE = 3
BACKPORT = 4
@enum.unique
@ -116,24 +121,24 @@ class Commit:
sha: str = attr.ib()
description: str = attr.ib()
nominated: bool = attr.ib(False)
nomination_type: typing.Optional[NominationType] = attr.ib(None)
nomination_type: NominationType = attr.ib(NominationType.NONE)
resolution: Resolution = attr.ib(Resolution.UNRESOLVED)
main_sha: typing.Optional[str] = attr.ib(None)
because_sha: typing.Optional[str] = attr.ib(None)
notes: typing.Optional[str] = attr.ib(None)
def to_json(self) -> 'CommitDict':
d: typing.Dict[str, typing.Any] = attr.asdict(self)
if self.nomination_type is not None:
d['nomination_type'] = self.nomination_type.value
d['nomination_type'] = self.nomination_type.value
if self.resolution is not None:
d['resolution'] = self.resolution.value
return typing.cast('CommitDict', d)
@classmethod
def from_json(cls, data: 'CommitDict') -> 'Commit':
c = cls(data['sha'], data['description'], data['nominated'], main_sha=data['main_sha'], because_sha=data['because_sha'])
if data['nomination_type'] is not None:
c.nomination_type = NominationType(data['nomination_type'])
c = cls(data['sha'], data['description'], data['nominated'], main_sha=data['main_sha'],
because_sha=data['because_sha'], notes=data['notes'])
c.nomination_type = NominationType(data['nomination_type'])
if data['resolution'] is not None:
c.resolution = Resolution(data['resolution'])
return c
@ -202,6 +207,14 @@ class Commit:
assert v
await ui.feedback(f'{self.sha} ({self.description}) committed successfully')
async def update_notes(self, ui: 'UI', notes: typing.Optional[str]) -> None:
self.notes = notes
async with ui.git_lock:
ui.save()
v = await commit_state(message=f'Updates notes for {self.sha}')
assert v
await ui.feedback(f'{self.sha} ({self.description}) notes updated successfully')
async def get_new_commits(sha: str) -> typing.List[typing.Tuple[str, str]]:
# Try to get the authoritative upstream main
@ -266,13 +279,11 @@ async def resolve_nomination(commit: 'Commit', version: str) -> 'Commit':
out = _out.decode()
# We give precedence to fixes and cc tags over revert tags.
# XXX: not having the walrus operator available makes me sad :=
m = IS_FIX.search(out)
if m:
if fix_for_commit := IS_FIX.search(out):
# We set the nomination_type and because_sha here so that we can later
# check to see if this fixes another staged commit.
try:
commit.because_sha = fixed = await full_sha(m.group(1))
commit.because_sha = fixed = await full_sha(fix_for_commit.group(1))
except PickUIException:
pass
else:
@ -281,18 +292,22 @@ async def resolve_nomination(commit: 'Commit', version: str) -> 'Commit':
commit.nominated = True
return commit
m = IS_CC.search(out)
if m:
if m.groups() == (None, None) or version in m.groups():
if backport_to := IS_BACKPORT.search(out):
if version in backport_to.groups():
commit.nominated = True
commit.nomination_type = NominationType.BACKPORT
return commit
if cc_to := IS_CC.search(out):
if cc_to.groups() == (None, None) or version in cc_to.groups():
commit.nominated = True
commit.nomination_type = NominationType.CC
return commit
m = IS_REVERT.search(out)
if m:
if revert_of := IS_REVERT.search(out):
# See comment for IS_FIX path
try:
commit.because_sha = reverted = await full_sha(m.group(1))
commit.because_sha = reverted = await full_sha(revert_of.group(1))
except PickUIException:
pass
else:


@ -94,9 +94,9 @@ class TestRE:
Reviewed-by: Jonathan Marek <jonathan@marek.ca>
""")
m = core.IS_FIX.search(message)
assert m is not None
assert m.group(1) == '3d09bb390a39'
fix_for_commit = core.IS_FIX.search(message)
assert fix_for_commit is not None
assert fix_for_commit.group(1) == '3d09bb390a39'
class TestCC:
@ -114,9 +114,9 @@ class TestRE:
Reviewed-by: Bas Nieuwenhuizen <bas@basnieuwenhuizen.nl>
""")
m = core.IS_CC.search(message)
assert m is not None
assert m.group(1) == '19.2'
cc_to = core.IS_CC.search(message)
assert cc_to is not None
assert cc_to.group(1) == '19.2'
def test_multiple_branches(self):
"""Tests commit with more than one branch specified"""
@ -130,10 +130,10 @@ class TestRE:
Reviewed-by: Pierre-Eric Pelloux-Prayer <pierre-eric.pelloux-prayer@amd.com>
""")
m = core.IS_CC.search(message)
assert m is not None
assert m.group(1) == '19.1'
assert m.group(2) == '19.2'
cc_to = core.IS_CC.search(message)
assert cc_to is not None
assert cc_to.group(1) == '19.1'
assert cc_to.group(2) == '19.2'
def test_no_branch(self):
"""Tests commit with no branch specification"""
@ -148,8 +148,8 @@ class TestRE:
Reviewed-by: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
""")
m = core.IS_CC.search(message)
assert m is not None
cc_to = core.IS_CC.search(message)
assert cc_to is not None
def test_quotes(self):
"""Tests commit with quotes around the versions"""
@ -162,9 +162,9 @@ class TestRE:
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
""")
m = core.IS_CC.search(message)
assert m is not None
assert m.group(1) == '20.0'
cc_to = core.IS_CC.search(message)
assert cc_to is not None
assert cc_to.group(1) == '20.0'
def test_multiple_quotes(self):
"""Tests commit with quotes around the versions"""
@ -177,10 +177,10 @@ class TestRE:
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
""")
m = core.IS_CC.search(message)
assert m is not None
assert m.group(1) == '20.0'
assert m.group(2) == '20.1'
cc_to = core.IS_CC.search(message)
assert cc_to is not None
assert cc_to.group(1) == '20.0'
assert cc_to.group(2) == '20.1'
def test_single_quotes(self):
"""Tests commit with quotes around the versions"""
@ -193,9 +193,9 @@ class TestRE:
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
""")
m = core.IS_CC.search(message)
assert m is not None
assert m.group(1) == '20.0'
cc_to = core.IS_CC.search(message)
assert cc_to is not None
assert cc_to.group(1) == '20.0'
def test_multiple_single_quotes(self):
"""Tests commit with quotes around the versions"""
@ -208,10 +208,10 @@ class TestRE:
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
""")
m = core.IS_CC.search(message)
assert m is not None
assert m.group(1) == '20.0'
assert m.group(2) == '20.1'
cc_to = core.IS_CC.search(message)
assert cc_to is not None
assert cc_to.group(1) == '20.0'
assert cc_to.group(2) == '20.1'
class TestRevert:
@ -232,9 +232,61 @@ class TestRE:
Reviewed-by: Bas Nieuwenhuizen <bas@basnieuwenhuizen.nl>
""")
m = core.IS_REVERT.search(message)
assert m is not None
assert m.group(1) == '2ca8629fa9b303e24783b76a7b3b0c2513e32fbd'
revert_of = core.IS_REVERT.search(message)
assert revert_of is not None
assert revert_of.group(1) == '2ca8629fa9b303e24783b76a7b3b0c2513e32fbd'
class TestBackportTo:
def test_single_release(self):
"""Tests commit meant for a single branch, ie, 19.1"""
message = textwrap.dedent("""\
radv: fix DCC fast clear code for intensity formats
This fixes a rendering issue with DiRT 4 on GFX10. Only GFX10 was
affected because intensity formats are different.
Backport-to: 19.2
Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/1923
Signed-off-by: Samuel Pitoiset <samuel.pitoiset@gmail.com>
Reviewed-by: Bas Nieuwenhuizen <bas@basnieuwenhuizen.nl>
""")
backport_to = core.IS_BACKPORT.search(message)
assert backport_to is not None
assert backport_to.groups() == ('19.2', None)
def test_multiple_release_space(self):
"""Tests commit with more than one branch specified"""
message = textwrap.dedent("""\
radeonsi: enable zerovram for Rocket League
Fixes corruption on game startup.
Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/1888
Backport-to: 19.1 19.2
Reviewed-by: Pierre-Eric Pelloux-Prayer <pierre-eric.pelloux-prayer@amd.com>
""")
backport_to = core.IS_BACKPORT.search(message)
assert backport_to is not None
assert backport_to.groups() == ('19.1', '19.2')
def test_multiple_release_comma(self):
"""Tests commit with more than one branch specified"""
message = textwrap.dedent("""\
radeonsi: enable zerovram for Rocket League
Fixes corruption on game startup.
Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/1888
Backport-to: 19.1, 19.2
Reviewed-by: Pierre-Eric Pelloux-Prayer <pierre-eric.pelloux-prayer@amd.com>
""")
backport_to = core.IS_BACKPORT.search(message)
assert backport_to is not None
assert backport_to.groups() == ('19.1', '19.2')
class TestResolveNomination:
@ -242,7 +294,7 @@ class TestResolveNomination:
@attr.s(slots=True)
class FakeSubprocess:
"""A fake asyncio.subprocess like classe for use with mock."""
"""A fake asyncio.subprocess like class for use with mock."""
out: typing.Optional[bytes] = attr.ib(None)
returncode: int = attr.ib(0)
@ -323,6 +375,28 @@ class TestResolveNomination:
assert not c.nominated
assert c.nomination_type is None
@pytest.mark.asyncio
async def test_backport_is_nominated(self):
s = self.FakeSubprocess(b'Backport-to: 16.2')
c = core.Commit('abcdef1234567890', 'a commit')
with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
await core.resolve_nomination(c, '16.2')
assert c.nominated
assert c.nomination_type is core.NominationType.BACKPORT
@pytest.mark.asyncio
async def test_backport_is_not_nominated(self):
s = self.FakeSubprocess(b'Backport-to: 16.2')
c = core.Commit('abcdef1234567890', 'a commit')
with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
await core.resolve_nomination(c, '16.1')
assert not c.nominated
assert c.nomination_type is None
@pytest.mark.asyncio
async def test_revert_is_nominated(self):
s = self.FakeSubprocess(b'This reverts commit 1234567890123456789012345678901234567890.')
@ -347,6 +421,21 @@ class TestResolveNomination:
assert not c.nominated
assert c.nomination_type is core.NominationType.REVERT
@pytest.mark.asyncio
async def test_is_fix_and_backport(self):
s = self.FakeSubprocess(
b'Fixes: 3d09bb390a39 (etnaviv: GC7000: State changes for HALTI3..5)\n'
b'Backport-to: 16.1'
)
c = core.Commit('abcdef1234567890', 'a commit')
with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
with mock.patch('bin.pick.core.is_commit_in_branch', self.return_true):
await core.resolve_nomination(c, '16.1')
assert c.nominated
assert c.nomination_type is core.NominationType.FIXES
@pytest.mark.asyncio
async def test_is_fix_and_cc(self):
s = self.FakeSubprocess(


@ -0,0 +1,2 @@
attrs==23.1.0
urwid==2.1.2


@ -47,6 +47,13 @@ class RootWidget(urwid.Frame):
super().__init__(*args, **kwargs)
self.ui = ui
class CommitList(urwid.ListBox):
def __init__(self, *args, ui: 'UI', **kwargs):
super().__init__(*args, **kwargs)
self.ui = ui
def keypress(self, size: int, key: str) -> typing.Optional[str]:
if key == 'q':
raise urwid.ExitMainLoop()
@ -101,6 +108,23 @@ class CommitWidget(urwid.Text):
return None
class FocusAwareEdit(urwid.Edit):
"""An Edit type that signals when it comes into and leaves focus."""
signals = urwid.Edit.signals + ['focus_changed']
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__is_focus = False
def render(self, size: typing.Tuple[int], focus: bool = False) -> urwid.Canvas:
if focus != self.__is_focus:
self._emit("focus_changed", focus)
self.__is_focus = focus
return super().render(size, focus)
@attr.s(slots=True)
class UI:
@ -112,6 +136,7 @@ class UI:
commit_list: typing.List['urwid.Button'] = attr.ib(factory=lambda: urwid.SimpleFocusListWalker([]), init=False)
feedback_box: typing.List['urwid.Text'] = attr.ib(factory=lambda: urwid.SimpleFocusListWalker([]), init=False)
notes: 'FocusAwareEdit' = attr.ib(factory=lambda: FocusAwareEdit('', multiline=True), init=False)
header: 'urwid.Text' = attr.ib(factory=lambda: urwid.Text('Mesa Stable Picker', align='center'), init=False)
body: 'urwid.Columns' = attr.ib(attr.Factory(lambda s: s._make_body(), True), init=False)
footer: 'urwid.Columns' = attr.ib(attr.Factory(lambda s: s._make_footer(), True), init=False)
@ -122,10 +147,36 @@ class UI:
new_commits: typing.List['core.Commit'] = attr.ib(factory=list, init=False)
git_lock: asyncio.Lock = attr.ib(factory=asyncio.Lock, init=False)
def _get_current_commit(self) -> typing.Optional['core.Commit']:
entry = self.commit_list.get_focus()[0]
return entry.original_widget.commit if entry is not None else None
def _change_notes_cb(self) -> None:
commit = self._get_current_commit()
if commit and commit.notes:
self.notes.set_edit_text(commit.notes)
else:
self.notes.set_edit_text('')
def _change_notes_focus_cb(self, notes: 'FocusAwareEdit', focus: 'bool') -> 'None':
# in the case of coming into focus we don't want to do anything
if focus:
return
commit = self._get_current_commit()
if commit is None:
return
text: str = notes.get_edit_text()
if text != commit.notes:
asyncio.ensure_future(commit.update_notes(self, text))
def _make_body(self) -> 'urwid.Columns':
commits = urwid.ListBox(self.commit_list)
commits = CommitList(self.commit_list, ui=self)
feedback = urwid.ListBox(self.feedback_box)
return urwid.Columns([commits, feedback])
urwid.connect_signal(self.commit_list, 'modified', self._change_notes_cb)
notes = urwid.Filler(self.notes)
urwid.connect_signal(self.notes, 'focus_changed', self._change_notes_focus_cb)
return urwid.Columns([urwid.LineBox(commits), urwid.Pile([urwid.LineBox(notes), urwid.LineBox(feedback)])])
def _make_footer(self) -> 'urwid.Columns':
body = [
@ -134,12 +185,12 @@ class UI:
urwid.Text('[C]herry Pick'),
urwid.Text('[D]enominate'),
urwid.Text('[B]ackport'),
urwid.Text('[A]pply additional patch')
urwid.Text('[A]pply additional patch'),
]
return urwid.Columns(body)
def _make_root(self) -> 'RootWidget':
return RootWidget(self.body, self.header, self.footer, 'body', ui=self)
return RootWidget(self.body, urwid.LineBox(self.header), urwid.LineBox(self.footer), 'body', ui=self)
def render(self) -> 'WidgetType':
asyncio.ensure_future(self.update())

47
bin/python-venv.sh Executable file
View file

@ -0,0 +1,47 @@
#!/usr/bin/env bash
set -eu
readonly requirements_file=$1
shift
venv_dir="$(dirname "$requirements_file")"/.venv
readonly venv_dir
readonly venv_req=$venv_dir/requirements.txt
readonly venv_python_version=$venv_dir/python-version.txt
if [ -d "$venv_dir" ]
then
if [ ! -r "$venv_python_version" ]
then
echo "Python environment predates Python version checks."
echo "It might be invalid and needs to be regenerated."
rm -rf "$venv_dir"
elif ! cmp --quiet <(python --version) "$venv_python_version"
then
old=$(cat "$venv_python_version")
new=$(python --version)
echo "Python version has changed ($old -> $new)."
echo "Python environment needs to be regenerated."
unset old new
rm -rf "$venv_dir"
fi
fi
if ! [ -r "$venv_dir/bin/activate" ]
then
echo "Creating Python environment..."
python -m venv "$venv_dir"
python --version > "$venv_python_version"
fi
# shellcheck disable=1091
source "$venv_dir/bin/activate"
if ! cmp --quiet "$requirements_file" "$venv_req"
then
echo "$(realpath --relative-to="$PWD" "$requirements_file") has changed, re-installing..."
pip --disable-pip-version-check install --requirement "$requirements_file"
cp "$requirements_file" "$venv_req"
fi
python "$@"

View file

@ -7,12 +7,41 @@ import subprocess
# This list contains symbols that _might_ be exported for some platforms
PLATFORM_SYMBOLS = [
'_GLOBAL_OFFSET_TABLE_',
'__bss_end__',
'__bss_start__',
'__bss_start',
'__cxa_guard_abort',
'__cxa_guard_acquire',
'__cxa_guard_release',
'__cxa_allocate_dependent_exception',
'__cxa_allocate_exception',
'__cxa_begin_catch',
'__cxa_call_unexpected',
'__cxa_current_exception_type',
'__cxa_current_primary_exception',
'__cxa_decrement_exception_refcount',
'__cxa_deleted_virtual',
'__cxa_demangle',
'__cxa_end_catch',
'__cxa_free_dependent_exception',
'__cxa_free_exception',
'__cxa_get_exception_ptr',
'__cxa_get_globals',
'__cxa_get_globals_fast',
'__cxa_increment_exception_refcount',
'__cxa_new_handler',
'__cxa_pure_virtual',
'__cxa_rethrow',
'__cxa_rethrow_primary_exception',
'__cxa_terminate_handler',
'__cxa_throw',
'__cxa_uncaught_exception',
'__cxa_uncaught_exceptions',
'__cxa_unexpected_handler',
'__dynamic_cast',
'__emutls_get_address',
'__gxx_personality_v0',
'__end__',
'__odr_asan._glapi_Context',
'__odr_asan._glapi_Dispatch',
@ -40,7 +69,7 @@ def get_symbols_nm(nm, lib):
if len(fields) == 2 or fields[1] == 'U':
continue
symbol_name = fields[0]
if platform_name == 'Linux':
if platform_name == 'Linux' or platform_name == 'GNU' or platform_name.startswith('GNU/'):
if symbol_name in PLATFORM_SYMBOLS:
continue
elif platform_name == 'Darwin':
@ -161,7 +190,7 @@ def main():
continue
if symbol[:2] == '_Z':
# As ajax found out, the compiler intentionally exports symbols
# that we explicitely asked it not to export, and we can't do
# that we explicitly asked it not to export, and we can't do
# anything about it:
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=36022#c4
continue

129
docs/_exts/bootstrap.py Normal file
View file

@ -0,0 +1,129 @@
# BSD 3-Clause License
#
# Copyright (c) 2018, pandas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Based on https://github.com/pydata/pydata-sphinx-theme
from docutils import nodes
import sphinx
from sphinx.ext.autosummary import autosummary_table
from sphinx.locale import admonitionlabels
import types
class BootstrapHTML5TranslatorMixin:
def __init__(self, *args, **kwds):
super().__init__(*args, **kwds)
self.settings.table_style = "table"
def starttag(self, *args, **kwargs):
"""ensure an aria-level is set for any heading role"""
if kwargs.get("ROLE") == "heading" and "ARIA-LEVEL" not in kwargs:
kwargs["ARIA-LEVEL"] = "2"
return super().starttag(*args, **kwargs)
def visit_admonition(self, node, name: str = '') -> None:
admonitionclasses = {
'attention': 'alert-primary',
'caution': 'alert-secondary',
'danger': 'alert-danger',
'error': 'alert-danger',
'hint': 'alert-secondary',
'important': 'alert-primary',
'note': 'alert-info',
'seealso': 'alert-info',
'tip': 'alert-info',
'warning': 'alert-warning',
}
self.body.append(self.starttag(
node, 'div', CLASS=('alert ' + admonitionclasses[name])))
if name:
self.body.append(
self.starttag(node, 'div', '', CLASS='h5'))
self.body.append(str(admonitionlabels[name]))
self.body.append('</div>')
def visit_table(self, node):
# init the attributes
atts = {}
self._table_row_indices.append(0)
# get the classes
classes = [cls.strip(" \t\n") for cls in self.settings.table_style.split(",")]
# we're looking at the 'real_table', which is wrapped by an autosummary
if isinstance(node.parent, autosummary_table):
classes += ["autosummary"]
# add the width if set in a style attribute
if "width" in node:
atts["style"] = f'width: {node["width"]}'
# add specific class if align is set
if "align" in node:
classes.append(f'table-{node["align"]}')
tag = self.starttag(node, "table", CLASS=" ".join(classes), **atts)
self.body.append(tag)
def setup_translators(app):
if app.builder.default_translator_class is None:
return
if not app.registry.translators.items():
translator = types.new_class(
"BootstrapHTML5Translator",
(
BootstrapHTML5TranslatorMixin,
app.builder.default_translator_class,
),
{},
)
app.set_translator(app.builder.name, translator, override=True)
else:
for name, klass in app.registry.translators.items():
if app.builder.format != "html":
# Skip translators that are not HTML
continue
translator = types.new_class(
"BootstrapHTML5Translator",
(
BootstrapHTML5TranslatorMixin,
klass,
),
{},
)
app.set_translator(name, translator, override=True)
def setup(app):
app.connect("builder-inited", setup_translators)

View file

@ -6,17 +6,8 @@
import docutils.nodes
import sphinx.addnodes
def parse_envvar(env, sig, signode):
envvar, t, default = sig.split(" ", 2)
envvar = envvar.strip().upper()
t = "Type: %s" % t.strip(" <>").lower()
default = "Default: %s" % default.strip(" ()")
signode += sphinx.addnodes.desc_name(envvar, envvar)
signode += docutils.nodes.Text(' ')
signode += sphinx.addnodes.desc_type(t, t)
signode += docutils.nodes.Text(', ')
signode += sphinx.addnodes.desc_annotation(default, default)
return envvar
from sphinx.util.nodes import split_explicit_title
from docutils import nodes, utils
def parse_opcode(env, sig, signode):
opcode, desc = sig.split("-", 1)
@ -26,8 +17,33 @@ def parse_opcode(env, sig, signode):
signode += sphinx.addnodes.desc_annotation(desc, desc)
return opcode
def ext_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
text = utils.unescape(text)
has_explicit_title, title, ext = split_explicit_title(text)
parts = ext.split('_', 2)
if parts[0] == 'VK':
full_url = f'https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/{ext}.html'
elif parts[0] == 'GL':
full_url = f'https://registry.khronos.org/OpenGL/extensions/{parts[1]}/{parts[1]}_{parts[2]}.txt'
else:
raise Exception(f'Unexpected API: {parts[0]}')
pnode = nodes.reference(title, title, internal=False, refuri=full_url)
return [pnode], []
def vkfeat_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
text = utils.unescape(text)
has_explicit_title, title, ext = split_explicit_title(text)
full_url = f'https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#features-{ext}'
pnode = nodes.reference(title, title, internal=False, refuri=full_url)
return [pnode], []
def setup(app):
app.add_object_type("envvar", "envvar", "%s (environment variable)",
parse_envvar)
app.add_object_type("opcode", "opcode", "%s (TGSI opcode)",
parse_opcode)
app.add_role('ext', ext_role)
app.add_role('vk-feat', vkfeat_role)

View file

@ -40,7 +40,7 @@ import nir_opcodes
OP_DESC_TEMPLATE = mako.template.Template("""
<%
def src_decl_list(num_srcs):
return ', '.join('nir_ssa_def *src' + str(i) for i in range(num_srcs))
return ', '.join('nir_def *src' + str(i) for i in range(num_srcs))
def to_yn(b):
return 'Y' if b else 'N'
@ -58,6 +58,8 @@ def to_yn(b):
- ${to_yn('associative' in op.algebraic_properties)}
- ${to_yn('2src_commutative' in op.algebraic_properties)}
${("**Description:** " + op.description) if op.description != "" else ""}
**Constant-folding:**
.. code-block:: c
@ -66,7 +68,7 @@ ${textwrap.indent(op.const_expr, ' ')}
**Builder function:**
.. c:function:: nir_ssa_def *nir_${op.name}(nir_builder *, ${src_decl_list(op.num_inputs)})
.. c:function:: nir_def *nir_${op.name}(nir_builder *, ${src_decl_list(op.num_inputs)})
""")
def parse_rst(state, parent, rst):

View file

@ -81,7 +81,7 @@ Additions to Chapter 8 of the GLES 3.2 Specification (Textures and Samplers)
BGRA_EXT B, G, R, A Color
Add to table 8.9 (Effective internal format correspondig to
Add to table 8.9 (Effective internal format corresponding to
external format).
Format Type Effective

View file

@ -360,7 +360,7 @@ Revision History
Version 4, 2013/02/01 - Add issue #12 regarding texture / renderbuffer
format queries.
Version 5, 2013/02/14 - Add issues #13 and #14 regarding simpler queires
Version 5, 2013/02/14 - Add issues #13 and #14 regarding simpler queries
after the context is created and made current.
Add issue #15 regarding the string query.
Add issue #16 regarding the value type returned

View file

@ -0,0 +1,105 @@
Name
MESA_sampler_objects
Name Strings
GL_MESA_sampler_objects
Contact
Adam Jackson <ajax@redhat.com>
Contributors
Emma Anholt
The contributors to ARB_sampler_objects and OpenGL ES 3
Status
Shipping
Version
Last Modified Date: 14 Sep 2021
Author Revision: 3
Number
TBD
Dependencies
OpenGL ES 2.0 is required.
This extension interacts with:
- EXT_shadow_samplers
- EXT_texture_filter_anisotropic
- EXT_texture_sRGB_decode
- OES_texture_border_clamp
Overview
This extension makes the sampler object subset of OpenGL ES 3.0 available
in OpenGL ES 2.0 contexts. As the intent is to allow access to the API
without necessarily requiring additional renderer functionality, some
sampler state that would be mandatory in GLES 3 is dependent on the
presence of additional extensions. Under GLES 3.0 or above this extension's
name string may be exposed for compatibility, but it is otherwise without
effect.
Refer to the OpenGL ES 3.0 specification for API details not covered here.
New Procedures and Functions
void glGenSamplers (GLsizei count, GLuint *samplers);
void glDeleteSamplers (GLsizei count, const GLuint *samplers);
GLboolean glIsSampler (GLuint sampler);
void glBindSampler (GLuint unit, GLuint sampler);
void glSamplerParameteri (GLuint sampler, GLenum pname, GLint param);
void glSamplerParameteriv (GLuint sampler, GLenum pname, const GLint *param);
void glSamplerParameterf (GLuint sampler, GLenum pname, GLfloat param);
void glSamplerParameterfv (GLuint sampler, GLenum pname, const GLfloat *param);
void glGetSamplerParameteriv (GLuint sampler, GLenum pname, GLint *params);
void glGetSamplerParameterfv (GLuint sampler, GLenum pname, GLfloat *params);
Note that these names are exactly as in ES3, with no MESA suffix.
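A minimal usage sketch (illustrative only, not part of the extension
text; it assumes a GLES 2.0 context that advertises
GL_MESA_sampler_objects):

    /* Sample with linear filtering on unit 0, regardless of the bound
     * texture object's own parameters. */
    GLuint sampler;
    glGenSamplers(1, &sampler);
    glSamplerParameteri(sampler, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glSamplerParameteri(sampler, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glBindSampler(0, sampler);    /* sampler state now overrides unit 0 */
    /* ... draw ... */
    glBindSampler(0, 0);          /* fall back to the texture's own state */
    glDeleteSamplers(1, &sampler);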
New Tokens
SAMPLER_BINDING 0x8919
Interactions
If EXT_shadow_samplers is not supported then TEXTURE_COMPARE_MODE and
TEXTURE_COMPARE_FUNC will generate INVALID_ENUM.
If EXT_texture_filter_anisotropic is not supported then
TEXTURE_MAX_ANISOTROPY_EXT will generate INVALID_ENUM.
If EXT_texture_sRGB_decode is not supported then TEXTURE_SRGB_DECODE_EXT
will generate INVALID_ENUM.
If OES_texture_border_clamp is not supported then TEXTURE_BORDER_COLOR
will generate INVALID_ENUM.
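As a hedged illustration of the interactions above (again not part of
the extension text, and reusing the sampler from the previous sketch),
an application should guard optional sampler state on the presence of
the corresponding extension:

    /* GL_TEXTURE_MAX_ANISOTROPY_EXT is only accepted when
     * EXT_texture_filter_anisotropic is exposed; otherwise it
     * generates INVALID_ENUM. */
    const char *exts = (const char *)glGetString(GL_EXTENSIONS);
    if (exts && strstr(exts, "GL_EXT_texture_filter_anisotropic"))
        glSamplerParameterf(sampler, GL_TEXTURE_MAX_ANISOTROPY_EXT, 4.0f);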
Issues
1) Why bother?
Sampler objects, at least in Mesa, are generically supported without any
driver-dependent requirements, so enabling this is essentially free. This
simplifies application support for otherwise GLES2 hardware, and for
drivers in development that haven't yet achieved GLES3.
Revision History
Rev. Date Author Changes
---- -------- -------- ---------------------------------------------
1 2019/10/22 ajax Initial revision
2 2019/11/14 ajax Add extension interactions:
- EXT_shadow_samplers
- EXT_texture_filter_anisotropic
- EXT_texture_sRGB_decode
- OES_texture_border_clamp
3 2021/09/14 ajax Expand the justification and ES3 interaction

View file

@ -46,7 +46,7 @@ Overview
GL_ARB_gpu_shader5 extends GLSL in a number of useful ways. Much of this
added functionality requires significant hardware support. There are many
aspects, however, that can be easily implmented on any GPU with "real"
aspects, however, that can be easily implemented on any GPU with "real"
integer support (as opposed to simulating integers using floating point
calculations).

View file

@ -0,0 +1,83 @@
Name
MESA_texture_const_bandwidth
Name Strings
GL_MESA_texture_const_bandwidth
Contact
Rob Clark <robdclark@chromium.org>
Contributors
Rob Clark, Google
Lina Versace, Google
Tapani Pälli, Intel
Status
Proposal
Version
Version 1, September, 2023
Number
tbd
Dependencies
Requires EXT_memory_object.
Overview
The use of data dependent bandwidth compressed formats (UBWC, AFBC, etc)
can introduce a form of side-channel, in that the bandwidth used for
texture access is dependent on the texture's contents. In some cases
an application may want to disable the use of data dependent formats on
specific textures.
For that purpose, this extension extends EXT_memory_object to introduce
a new <param> CONST_BW_TILING_MESA.
IP Status
None
Issues
None
New Procedures and Functions
None
New Types
None
New Tokens
Returned in the <params> parameter of GetInternalFormativ or
GetInternalFormati64v when the <pname> parameter is TILING_TYPES_EXT,
returned in the <params> parameter of GetTexParameter{if}v,
GetTexParameterI{i ui}v, GetTextureParameter{if}v, and
GetTextureParameterI{i ui}v when the <pname> parameter is
TEXTURE_TILING_EXT, and accepted by the <params> parameter of
TexParameter{ifx}{v}, TexParameterI{i ui}v, TextureParameter{if}{v},
TextureParameterI{i ui}v when the <pname> parameter is
TEXTURE_TILING_EXT:
CONST_BW_TILING_MESA 0x8BBE
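A hedged usage sketch (illustrative only, not part of the extension
text; <width>, <height> and the imported memory object <memObj> are
assumed to come from an earlier EXT_memory_object import, and the C
token name assumes the usual GL_ prefix):

    /* Request a constant-bandwidth (non-data-dependent) layout before
     * specifying storage from the imported memory object. */
    GLuint tex;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_TILING_EXT,
                    GL_CONST_BW_TILING_MESA);
    glTexStorageMem2DEXT(GL_TEXTURE_2D, 1, GL_RGBA8, width, height,
                         memObj, 0);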
Errors
None
Revision History
Version 1, 2023-9-28 (Rob Clark)
Initial draft.

View file

@ -51,7 +51,7 @@ Overview
monitor. The screen surface can be scrolled by changing this origin.
This extension also defines functions for controlling the monitor's
display mode (width, height, refresh rate, etc), and specifing which
display mode (width, height, refresh rate, etc), and specifying which
screen surface is to be displayed on a monitor.
The new EGLModeMESA type and related functions are very similar to the

View file

@ -12,7 +12,7 @@ Contact
Status
Not shipping.
Obsolete.
Version
@ -70,7 +70,7 @@ Changes to Chapter 2 of the GLX 1.3 Specification (Functions and Errors)
In addition, an indirect rendering context can be current for
only one thread at a time. A direct rendering context may be
current to multiple threads, with synchronization of access to
the context thruogh the GL managed by the application through
the context through the GL managed by the application through
mutexes.
Changes to Chapter 3 of the GLX 1.3 Specification (Functions and Errors)

Some files were not shown because too many files have changed in this diff.