Add smoke-tests and fpga logs on dashboard (#1928)

Author: valentinThomazic
Date:   2024-03-14 14:43:56 +01:00 (committed by GitHub)
Parent: 94f6528e1f
Commit: fd12ee596c
5 changed files with 66 additions and 12 deletions

View file

@@ -119,7 +119,8 @@ build_tools:
    - echo $SYN_VCS_BASHRC; source $SYN_VCS_BASHRC

.simu_after_script: &simu_after_script
  - for i in verif/sim/*/v*_sim/*.log.iss ; do head -10000 $i > artifacts/logs/$(basename $i).head ; done
  - for i in $(find verif/sim/*/v*_sim -type f \( -name "*.csv" -o -name "*.iss" \)) ; do head -10000 $i > artifacts/logs/$(basename $i).head ; done
  - head -10000 verif/sim/logfile.log > artifacts/logs/logfile.log.head
  - python3 .gitlab-ci/scripts/report_simu.py verif/sim/logfile.log

smoke:
@@ -240,10 +241,12 @@ fpga-build:
  script:
    - source $VIVADO_SETUP
    - source ./verif/sim/setup-env.sh
    - make fpga target=$TARGET
    - mkdir -p artifacts/logs
    - make fpga target=$TARGET 2>&1 | tee artifacts/logs/logfile.log
    - tail -20 artifacts/logs/logfile.log > artifacts/logs/logfile.log.tail
    - mkdir -p artifacts/reports
    - mv corev_apu/fpga/work-fpga/ariane_xilinx.bit artifacts/ariane_xilinx_$TARGET.bit
    - python3 .gitlab-ci/scripts/report_fpga.py corev_apu/fpga/reports/ariane.utilization.rpt
    - python3 .gitlab-ci/scripts/report_fpga.py corev_apu/fpga/reports/ariane.utilization.rpt artifacts/logs/logfile.log.tail

.regress_test:
  stage: heavy tests
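
Taken together, the head/tee/tail steps above give the dashboard scripts a predictable set of truncated logs under artifacts/logs/. An illustrative layout (placeholder names; the per-test patterns follow what report_simu.py reconstructs further down):

    artifacts/logs/logfile.log                    # full make fpga output captured via tee (fpga-build)
    artifacts/logs/logfile.log.tail               # last 20 lines of the above (fpga-build)
    artifacts/logs/logfile.log.head               # first 10000 lines of verif/sim/logfile.log (simulation jobs)
    artifacts/logs/<test>.<target>.log.iss.head   # truncated ISS trace, one per test (simulation jobs)
    artifacts/logs/<test>.<target>.csv.head       # truncated disassembly, one per test (simulation jobs)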

View file

@@ -74,6 +74,22 @@ class TableMetric(Metric):
class TableStatusMetric(Metric):
    "Table with status label for each line"

    class _TableStatusMetricColumn():
        def __init__(self, title, col_type):
            self.title = title
            self.col_type = col_type

        def to_doc(self):
            return { "title": self.title, "col_type": self.col_type }

    def __init__(self, name):
        super().__init__(name)
        self.columns = []

    def add_column(self, title, col_type):
        "Add a column with the given title and type to the table"
        self.columns.append(TableStatusMetric._TableStatusMetricColumn(title, col_type))

    def add_pass_label(self, label, *col):
        "Insert a 'pass' line with given label in the table"
        self._add_value('pass', label, *col)
@@ -91,6 +107,12 @@ class TableStatusMetric(Metric):
        "Insert a 'fail' line in the table"
        self.add_fail_label("FAIL", *col)

    def to_doc(self):
        doc = super().to_doc()
        if len(self.columns) > 0:
            doc['columns'] = list(map(lambda col: col.to_doc(), self.columns))
        return doc

    def _add_value(self, status, label, *col):
        self.values.append((status, label, list(col)))
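
For reference, a minimal usage sketch of the extended API (the metric name, column titles and row values below are invented for illustration; add_pass and add_fail_label are the helpers used by the dashboard scripts such as report_simu.py):

    import report_builder as rb

    metric = rb.TableStatusMetric('Smoke tests')          # illustrative metric name
    metric.add_column("TEST", "text")                     # plain-text column
    metric.add_column("OUTPUT", "log")                    # column holding a path to a log artifact
    metric.add_pass("some_test", "logs/1234/artifacts/logs/logfile.log.head")
    metric.add_fail_label("TIMEOUT", "other_test", "logs/1234/artifacts/logs/logfile.log.head")
    doc = metric.to_doc()                                 # now also carries a 'columns' description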

View file

@@ -15,6 +15,9 @@ import report_builder as rb
with open(str(sys.argv[1]), "r") as f:
    log = f.read()

with open(str(sys.argv[2]), "r") as f:
    outputlog = f.read()

pattern = re.compile(
"\|(?P<ind> +)(?P<Instance>[\w()\[\].]+) +\| +(?P<Module>[\w()\[\].]+) \| +(?P<TotalLUTs>\d+) \| +(?P<LogicLUTs>\d+) \| +(?P<LUTRAMs>\d+) \| +(?P<SRLs>\d+) \| +(?P<FFs>\d+) \| +(?P<RAMB36>\d+) \| +(?P<RAMB18>\d+) \| +(?P<DSP48Blocks>\d+) \|"
)
@@ -47,5 +50,8 @@ for i in data:
        i["DSP48Blocks"] + " DSP48Blocks",
    )
report.add_metric(metric)
log_metric = rb.LogMetric("Last lines of logfile")
log_metric.values = outputlog.splitlines()
report.add_metric(metric, log_metric)
report.dump()
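
With the second argument, the script now expects sys.argv[1] to be the Vivado utilization report and sys.argv[2] the truncated build log, matching the updated fpga-build step above:

    python3 .gitlab-ci/scripts/report_fpga.py corev_apu/fpga/reports/ariane.utilization.rpt artifacts/logs/logfile.log.tail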

View file

@@ -10,23 +10,46 @@
import sys
import re
import report_builder as rb
import os

with open(str(sys.argv[1]), 'r') as f:
    log = f.read()
pattern = re.compile(
"^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} INFO )?Compiling (.*): .*(tests\S*)$[\s\S]*?^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} INFO )?Found matching ISS: (\S*)$[\s\S]*?^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} INFO )?ISA (\S*)$[\s\S]*?^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} INFO )?\[(\w*)\]: (\d*) matched(?:, (\d*) mismatch)?$",
r'(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} INFO )Compiling (.*): .*(tests\S*)$[\s\S]*?^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} INFO )Found matching ISS: (\S*)$[\s\S]*?^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} INFO )Target: (\S*)$[\s\S]*?^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2}(?: INFO ))ISA (\S*)$[\s\S]*?^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} (?:(?:INFO )\[(\w*)\]: (\d*) matched(?:, (\d*) mismatch)?)|(?:^(?:\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2})(?: ERROR )(\D{5})(?:.*)$))',
re.MULTILINE)
list_of_tests = pattern.findall(log)

metric = rb.TableStatusMetric('')

metric.add_column("TARGET", "text")
metric.add_column("ISA", "text")
metric.add_column("TEST_TYPE", "text")
metric.add_column("TEST", "text")
metric.add_column("OUTPUT", "log")
metric.add_column("TB_LOGS", "log")
metric.add_column("DISASSEMBLY", "log")

job_test_pass = 0
job_test_total = 0

logsPath = "logs/" + os.environ.get("CI_JOB_ID") + "/artifacts/logs/"

for i in list_of_tests:
    job_test_total += 1
    col = [i[3], i[2], i[0], i[1]] # isa testbench testsuite test
    if i[4] == "PASSED":
    target = i[3]
    isa = i[4]
    testsuite = i[0]
    test = i[1].split("/")[-1].split(".")[0]
    output_log = logsPath + 'logfile.log.head'
    tb_log = logsPath + test + "." + target + '.log.iss.head'
    disassembly = logsPath + test + "." + target + '.csv.head'
    col = [target, isa, testsuite, test, output_log, tb_log, disassembly]
    if i[5] == "PASSED":
        metric.add_pass(*col)
        job_test_pass += 1
    else:
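
For reference, the updated pattern expects timestamped INFO lines in the simulation logfile, including the newly logged Target: line. An illustrative excerpt (test name, date, path, target and counts are invented) that would yield one PASSED table row:

    Thu, 14 Mar 2024 14:43:56 INFO Compiling riscv_arithmetic_basic_test: out/asm_tests/riscv_arithmetic_basic_test_0.S
    Thu, 14 Mar 2024 14:43:57 INFO Found matching ISS: spike
    Thu, 14 Mar 2024 14:43:57 INFO Target: cv64a6_imafdc_sv39
    Thu, 14 Mar 2024 14:43:58 INFO ISA rv64imafdc
    Thu, 14 Mar 2024 14:44:10 INFO [PASSED]: 6542 matched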

View file

@@ -134,6 +134,7 @@ def parse_iss_yaml(iss, iss_yaml, isa, target, setting_dir, debug_cmd, priv):
        if entry['iss'] == iss:
            logging.info("Found matching ISS: %s" % entry['iss'])
            m = re.search(r"rv(?P<xlen>[0-9]+?)(?P<variant>[a-z]+(_[szx]\w+)*)$", isa)
            logging.info("Target: " + target)
            if m: logging.info("ISA %0s" % isa)
            else: logging.error("Illegal ISA %0s" % isa)
@@ -285,7 +286,7 @@ def do_simulate(sim_cmd, test_list, cwd, sim_opts, seed_gen, csr_file,
                  (" +num_of_tests=%i " % test_cnt) + \
                  (" +start_idx=%d " % (i*batch_size)) + \
                  (" +asm_file_name=%s/asm_tests/%s " % (output_dir, test['test'])) + \
                  (" -l %s/sim_%s_%d%s.log " % (output_dir, test['test'], i, log_suffix))
                  (" -l %s/sim_%s_%d_%s.log " % (output_dir, test['test'], i, log_suffix))
            if verbose:
                cmd += "+UVM_VERBOSITY=UVM_HIGH "
            cmd = re.sub("<seed>", str(rand_seed), cmd)
@@ -664,7 +665,7 @@ def iss_sim(test_list, output_dir, iss_list, iss_yaml, iss_opts,
        for i in range(0, test['iterations']):
            prefix = ("%s/asm_tests/%s_%d" % (output_dir, test['test'], i))
            elf = prefix + ".o"
            log = ("%s/%s.%d.%s.log" % (log_dir, test['test'], i, target))
            log = ("%s/%s_%d.%s.log" % (log_dir, test['test'], i, target))
            cmd = get_iss_cmd(base_cmd, elf, target, log)
            if 'iss_opts' in test:
                cmd += ' '
@@ -702,7 +703,7 @@ def iss_cmp(test_list, iss, target, output_dir, stop_on_first_error, exp, debug_
            log_list = []
            run_cmd(("echo 'Test binary: %s' >> %s" % (elf, report)))
            for iss in iss_list:
                log_list.append("%s/%s_sim/%s.%d.%s.log" % (output_dir, iss, test['test'], i, target))
                log_list.append("%s/%s_sim/%s_%d.%s.log" % (output_dir, iss, test['test'], i, target))
            compare_iss_log(iss_list, log_list, report, stop_on_first_error, exp)
    save_regr_report(report)
@@ -1050,7 +1051,7 @@ def check_spike_version():
    get_env_var("SPIKE_PATH")
    user_spike_version = subprocess.run("$SPIKE_PATH/spike -v", capture_output=True, text=True, shell=True)
    user_spike_version_string = user_spike_version.stderr.strip()
    print(user_spike_version)
    if user_spike_version.returncode != 0:
        incorrect_version_exit("Spike", "- unknown -", spike_version)
@@ -1328,4 +1329,3 @@ if __name__ == "__main__":
    sys.path.append(os.getcwd()+"/../../util")
    from config_pkg_generator import *
    main()
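
These renames keep the per-test log names in step with what report_simu.py reconstructs above (assuming the downstream ISS flow still appends the .iss suffix before the CI head step). A quick sanity-check sketch of the changed format strings, with purely illustrative values:

    # Illustrative values only; the format strings are the ones changed above.
    output_dir, log_dir = "out", "out/spike_sim"
    test, i, target, log_suffix = "riscv_arithmetic_basic_test", 0, "cv64a6_imafdc_sv39", "veri-testharness"
    print("%s/sim_%s_%d_%s.log" % (output_dir, test, i, log_suffix))  # out/sim_riscv_arithmetic_basic_test_0_veri-testharness.log
    print("%s/%s_%d.%s.log" % (log_dir, test, i, target))             # out/spike_sim/riscv_arithmetic_basic_test_0.cv64a6_imafdc_sv39.log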