This commit is contained in:
Harry Callahan 2024-09-24 09:23:59 +02:00 committed by GitHub
commit 8ec6282ec8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
40 changed files with 3220 additions and 133 deletions

178
Makefile
View file

@ -6,57 +6,110 @@ all: help
.PHONY: help
help:
@echo "This is a short hand for running popular tasks."
@echo "Please check the documentation on how to get started"
@echo "or how to set-up the different environments."
@echo "The following targets are a good starting point for newcomers:"
@echo ""
@echo " > Run ibex simulations using verilator"
@echo " make run-simple-system-hello"
@echo " make run-coremark"
@echo " make run-pmp-smoke-test"
@echo " make run-csr-test"
@echo ""
@echo " > Run ibex DV environment"
@echo " > (If you have access to EDA simulation tools)"
@echo " make -C dv/uvm/core_ibex SIMULATOR=xlm ITERATIONS=4 TEST=riscv_rand_instr_test COV=1"
@echo ""
@echo " > Run yosys flow using nangate45"
@echo " make run_syn_yosys_nangate45"
@echo ""
@echo " > (caution: clears all working changes)"
@echo " make reset"
# Use a parallel run (make -j N) for a faster build
build-all: build-riscv-compliance build-simple-system build-arty-100 \
build-csr-test
# RISC-V compliance
.PHONY: build-riscv-compliance
build-riscv-compliance:
fusesoc --cores-root=. run --target=sim --setup --build \
lowrisc:ibex:ibex_riscv_compliance \
$(FUSESOC_CONFIG_OPTS)
#################
# Simple system #
#################
# Simple system
# Use the following targets:
# - "build-simple-system"
# - "run-simple-system"
.PHONY: build-simple-system
build-simple-system:
.PHONY: build-simple-system-hello-bin
.PHONY: run-simple-system-hello
simple-system-Vibex=build/lowrisc_ibex_ibex_simple_system_0/sim-verilator/Vibex_simple_system
hello-bin=examples/sw/simple_system/hello_test/hello_test.vmem
build-simple-system: $(simple-system-Vibex)
$(simple-system-Vibex):
fusesoc --cores-root=. run --target=sim --setup --build \
lowrisc:ibex:ibex_simple_system \
$(FUSESOC_CONFIG_OPTS)
simple-system-program = examples/sw/simple_system/hello_test/hello_test.vmem
sw-simple-hello: $(simple-system-program)
.PHONY: $(simple-system-program)
$(simple-system-program):
build-simple-system-hello-bin: $(hello-bin)
$(hello-bin):
cd examples/sw/simple_system/hello_test && $(MAKE)
Vibex_simple_system = \
build/lowrisc_ibex_ibex_simple_system_0/sim-verilator/Vibex_simple_system
$(Vibex_simple_system):
@echo "$@ not found"
@echo "Run \"make build-simple-system\" to create the dependency"
@false
run-simple-system: sw-simple-hello | $(Vibex_simple_system)
run-simple-system-hello: $(hello-bin) $(simple-system-Vibex)
build/lowrisc_ibex_ibex_simple_system_0/sim-verilator/Vibex_simple_system \
--raminit=$(simple-system-program)
--raminit=$(hello-bin)
@echo ""
cat ibex_simple_system.log
# Coremark
# See 'examples/sw/benchmarks/README.md' for more details
# Use the following targets:
.PHONY: build-ss-maxperf-coremark
.PHONY: build-coremark-sw
.PHONY: run-coremark
coremark-maxperf-Vibex=build/lowrisc_ibex_ibex_simple_system_0/sim-verilator/Vibex_simple_system
coremark-bin=examples/sw/benchmarks/coremark/coremark.elf
build-ss-maxperf-coremark: $(coremark-maxperf-Vibex)
$(coremark-maxperf-Vibex):
fusesoc --cores-root=. run --target=sim --setup --build lowrisc:ibex:ibex_simple_system `./util/ibex_config.py maxperf fusesoc_opts`
build-coremark-sw: $(coremark-bin)
$(coremark-bin):
make -C ./examples/sw/benchmarks/coremark/
run-coremark: build-ss-maxperf-coremark build-coremark-sw
$(coremark-maxperf-Vibex) --meminit=ram,$(coremark-bin)
@echo ""
grep "CoreMark" ./ibex_simple_system.log
# pmp_smoke_test
# Use the following targets:
.PHONY: build-simple-system
.PHONY: build-pmp-smoke-test-sw
.PHONY: run-pmp-smoke-test
pmp-smoke-test-bin=examples/sw/simple_system/pmp_smoke_test/pmp_smoke_test.elf
build-pmp-smoke-test-sw: $(pmp-smoke-test-bin)
$(pmp-smoke-test-bin):
make -C ./examples/sw/simple_system/pmp_smoke_test
run-pmp-smoke-test: build-simple-system build-pmp-smoke-test-sw
$(simple-system-Vibex) --meminit=ram,$(pmp-smoke-test-bin)
@echo ""
cat ibex_simple_system.log
########################
# Arty A7 FPGA example #
########################
# DEAD - DO NOT USE THIS SECTION #
# Arty A7 FPGA example
# Use the following targets (depending on your hardware):
# - "build-arty-35"
# - "build-arty-100"
# - "program-arty"
.PHONY: build-arty-35
.PHONY: build-arty-100
.PHONY: program-arty
arty-sw-program = examples/sw/led/led.vmem
sw-led: $(arty-sw-program)
@ -64,34 +117,27 @@ sw-led: $(arty-sw-program)
$(arty-sw-program):
cd examples/sw/led && $(MAKE)
.PHONY: build-arty-35
build-arty-35: sw-led
fusesoc --cores-root=. run --target=synth --setup --build \
lowrisc:ibex:top_artya7 --part xc7a35ticsg324-1L
.PHONY: build-arty-100
build-arty-100: sw-led
fusesoc --cores-root=. run --target=synth --setup --build \
lowrisc:ibex:top_artya7 --part xc7a100tcsg324-1
.PHONY: program-arty
program-arty:
fusesoc --cores-root=. run --target=synth --run \
lowrisc:ibex:top_artya7
# Lint check
.PHONY: lint-core-tracing
lint-core-tracing:
fusesoc --cores-root . run --target=lint lowrisc:ibex:ibex_core_tracing \
$(FUSESOC_CONFIG_OPTS)
##########################
# CS Registers testbench #
##########################
# CS Registers testbench
# Use the following targets:
# - "build-csr-test"
# - "run-csr-test"
.PHONY: build-csr-test
.PHONY: run-csr-test
build-csr-test:
fusesoc --cores-root=. run --target=sim --setup --build \
--tool=verilator lowrisc:ibex:tb_cs_registers
@ -102,11 +148,44 @@ $(Vtb_cs_registers):
@echo "Run \"make build-csr-test\" to create the dependency"
@false
.PHONY: run-csr-test
run-csr-test: | $(Vtb_cs_registers)
fusesoc --cores-root=. run --target=sim --run \
--tool=verilator lowrisc:ibex:tb_cs_registers
#############
# Synthesis #
#############
# Use the following targets:
.PHONY: run_syn_yosys_nangate45
run_syn_yosys_nangate45:
pushd syn/ && \
./syn_yosys.sh && \
popd
#########
# Other #
#########
# RISC-V compliance
.PHONY: build-riscv-compliance
build-riscv-compliance:
fusesoc --cores-root=. run --target=sim --setup --build \
lowrisc:ibex:ibex_riscv_compliance \
$(FUSESOC_CONFIG_OPTS)
########
# UTIL #
########
# Lint check
.PHONY: lint-top-tracing
lint-top-tracing:
fusesoc --cores-root . run --target=lint lowrisc:ibex:ibex_top_tracing \
$(FUSESOC_CONFIG_OPTS)
# Echo the parameters passed to fusesoc for the chosen IBEX_CONFIG
.PHONY: test-cfg
test-cfg:
@ -115,3 +194,12 @@ test-cfg:
.PHONY: python-lint
python-lint:
$(MAKE) -C util lint
.PHONY: clean
clean:
rm -rf build/
.PHONY: reset
reset:
git clean -ffdx; git clean -ffdX; git reset --hard HEAD

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

View file

@ -23,3 +23,5 @@ xmsc.log
# Generated by coverage
imc.key
mdv.log
.fsm.sch.verilog.xml

View file

@ -44,6 +44,8 @@ def _main() -> int:
'--end_signature_addr', md.signature_addr])]
retcode = run_one(md.verbose, md.riscvdv_build_cmds[0], redirect_stdstreams=md.riscvdv_build_stdout)
if retcode:
logger.warning(f"WARNING: Saw non-zero retcode while building instr_gen : logfile -> {md.riscvdv_build_stdout}")
return retcode

View file

@ -14,6 +14,7 @@ from metadata import RegressionMetadata, LockedMetadata
from ibex_cmd import get_compile_opts
from scripts_lib import run_one
import riscvdv_interface
import nix_lib
import logging
logger = logging.getLogger(__name__)
@ -34,7 +35,8 @@ def _get_iss_pkgconfig_flags(specifiers, iss_pc, simulator):
# tool, and its easy for the options to arrive malformed. Use
# the following hack to get it through.
if '-Wl' in _flags: # This should be in LDFLAGS only
_flags = "'-Xlinker {}'".format(_flags.replace('-Wl,', ''))
_flags = "-Xlinker,{}".format(_flags.replace('-Wl,', ''))
return _flags
@ -108,8 +110,22 @@ def _main() -> int:
compile_fd.write(f"Running compile_tb command :\n{' '.join(cmd)}\n".encode())
retcode = run_one(md.verbose, cmd, redirect_stdstreams=compile_fd)
if retcode:
logger.warning(f"WARNING: Saw non-zero retcode while compiling testbench : logfile -> {md.tb_build_stdout}")
return retcode
# If in a nix shell, patch the compiled simulation executable to use our chosen shared libraries
if os.getenv("IBEX_NIX_SHELL_LIB") is not None:
if md.simulator == "xlm":
so = md.dir_tb / "xcelium.d" / "run.d" / "librun.so"
# nix_lib.patch_rpath(so) # Doesn't work
nix_lib.patch_dtneeded(so)
# Finally, strip the rpath of unecessary entries
cmd = ["patchelf", str(so), "--shrink-rpath"]
subprocess.check_output(cmd)
return 0

View file

@ -51,6 +51,7 @@ def get_riscvdv_compile_cmds(md: RegressionMetadata, trr: TestRunResult) -> List
cmd=trr.compile_asm_gen_cmds[0],
redirect_stdstreams=trr.compile_asm_gen_log)
if dv_ret:
logger.warning(f"WARNING: Saw non-zero retcode while compiling test : logfile -> {trr.compile_asm_gen_log}")
return dv_ret
orig_cmds = []

View file

@ -30,6 +30,8 @@ def _main():
retcode = run_one(md.verbose, md.riscvdv_fcov_cmds[0], md.riscvdv_fcov_stdout)
if retcode:
logger.warning(f"WARNING: Saw non-zero retcode while generating riscv-dv fcov : logfile -> {md.riscvdv_fcov_stdout}")
return retcode

View file

@ -7,7 +7,6 @@
import argparse
import logging
import os
import sys
import pathlib3x as pathlib
@ -17,35 +16,47 @@ from metadata import RegressionMetadata, LockedMetadata
from setup_imports import _OT_LOWRISC_IP
from scripts_lib import run_one
import logging
logger = logging.getLogger(__name__)
def find_cov_dbs(start_dir: pathlib.Path, simulator: str) -> Set[pathlib.Path]:
"""Gather a set of the coverage databases."""
"""Gather the paths of all individual coverage databases for each test.
Each EDA tool may have a different format in which they save this data from the
run-phase, so use rglobs to locate these files relative to start_dir.
"""
cov_dbs = set()
if simulator == 'xlm':
for p in start_dir.glob('**/*.ucd'):
logging.info(f"Found coverage database (ucd) at {p}")
logger.info(f"Found coverage database (ucd) at {p}")
cov_dbs.add(p)
if simulator == 'vcs':
for p in start_dir.glob('**/test.vdb'):
logging.info(f"Found coverage database (vdb) at {p}")
logger.info(f"Found coverage database (vdb) at {p}")
cov_dbs.add(p)
if not cov_dbs:
logging.info(f"No coverage found for {simulator}")
return 1
logger.info(f"No coverage found for {simulator}")
return ()
return cov_dbs
def merge_cov_vcs(md: RegressionMetadata, cov_dirs: Set[pathlib.Path]) -> int:
cmd = (['urg', '-full64',
'-format', 'both',
'-dbname', str(md.dir_cov/'merged.vdb'),
'-report', str(md.dir_cov/'report'),
'-log', str(md.dir_cov/'merge.log'),
'-dir'] +
[str(cov_dir) for cov_dir in list(cov_dirs)])
"""Invoke 'urg' to merge the vcs coverage databases for each individual test
Returns: the retcode of the urg merge command.
"""
cmd = (
['urg',
'-full64',
'-format', 'both',
'-dbname', str(md.dir_cov/'merged.vdb'),
'-report', str(md.dir_cov/'report'),
'-log', str(md.dir_cov/'merge.log'),
'-dir'] + [str(cov_dir) for cov_dir in list(cov_dirs)]
)
with LockedMetadata(md.dir_metadata, __file__) as md:
md.cov_merge_log = md.dir_cov / 'merge.log'
@ -53,14 +64,20 @@ def merge_cov_vcs(md: RegressionMetadata, cov_dirs: Set[pathlib.Path]) -> int:
md.cov_merge_cmds = [cmd]
with open(md.cov_merge_stdout, 'wb') as fd:
logging.info("Generating merged coverage directory")
return run_one(md.verbose, cmd, redirect_stdstreams=fd)
logger.info("Generating merged coverage directory")
merge_ret = run_one(md.verbose, cmd, redirect_stdstreams=fd)
if merge_ret:
logger.warning(f"WARNING: Saw non-zero retcode while merging coverage : logfile -> {trr.cov_merge_stdout}")
return merge_ret
def merge_cov_xlm(md: RegressionMetadata, cov_dbs: Set[pathlib.Path]) -> int:
"""Merge xcelium-generated coverage using the OT scripts.
The vendored-in OpenTitan IP contains .tcl scripts that can merge xcelium
coverage using the Cadence 'imc' Integrated-Metrics-Centre tool.
Returns: the retcode of the imc merge command.
"""
xcelium_scripts = _OT_LOWRISC_IP/'dv/tools/xcelium'
@ -72,26 +89,28 @@ def merge_cov_xlm(md: RegressionMetadata, cov_dbs: Set[pathlib.Path]) -> int:
md.cov_merge_db_list = md.dir_cov / 'cov_db_runfile'
md.cov_merge_log = md.dir_cov / 'merge.log'
md.cov_merge_stdout = md.dir_cov / 'merge.log.stdout'
md.cov_merge_cmds = [(imc_cmd + ["-exec", str(xcelium_scripts/"cov_merge.tcl"),
"-logfile", str(md.dir_cov/'merge.log')])]
md.cov_merge_cmds = [(imc_cmd + ["-exec", str(xcelium_scripts / "cov_merge.tcl"),
"-logfile", str(md.dir_cov / 'merge.log')])]
md.cov_report_log = md.dir_cov / 'report.log'
md.cov_report_stdout = md.dir_cov / 'report.log.stdout'
md.cov_report_cmds = [(imc_cmd + ["-load", str(md.dir_cov_merged),
"-init", str(md.ibex_dv_root/"waivers"/"coverage_waivers_xlm.tcl"),
"-exec", str(xcelium_scripts/"cov_report.tcl"),
"-logfile", str(md.dir_cov/'report.log')])]
"-init", str(md.ibex_dv_root / "waivers" / "coverage_waivers_xlm.tcl"),
"-exec", str(xcelium_scripts / "cov_report.tcl"),
"-logfile", str(md.dir_cov / 'report.log')])]
# Dump the list of databases to a runfile, which will be read by the .tcl script
# This prevents the argument list from getting too long for an environment variable when using lots of iterations
# > The paths in the <runfile> should be listed one per line.
# > The path to the <runfile> should be set in the environment variable 'cov_db_runfile'
md.cov_merge_db_list.write_text( ('\n'.join(str(d.parent) for d in cov_dbs)) + '\n')
# The merge TCL code uses a glob to find all available scopes and previous
# runs. In order to actually get the databases we need to go up once so
# that the "*" finds the directory we've seen.
# (Parent of the .ucm file?)
cov_dir_parents = ' '.join(str(d.parent.parent) for d in cov_dbs)
# Finally, set an environment variable containing all the directories that
# should be merged (this is how the list gets passed down to the TCL script
# that handles them)
# Setup any environment variables needed by the coverage merge TCL scripts
# - cov_merge_db_dir : The location to output the merged coverage database
# - cov_report_dir : The location to output the reports database
# - cov_db_dirs : (Unused) A list of individual coverage databases to be merged
# - cov_db_runfile : The location of a file containing all individual coverage databases to be merged
# - DUT_TOP : Top-level module name of the DUT
xlm_cov_dirs = {
'cov_merge_db_dir': str(md.dir_cov_merged),
'cov_report_dir': str(md.dir_cov_report),
@ -99,15 +118,10 @@ def merge_cov_xlm(md: RegressionMetadata, cov_dbs: Set[pathlib.Path]) -> int:
'cov_db_runfile': str(md.cov_merge_db_list),
"DUT_TOP": md.dut_cov_rtl_path
}
logger.info(f"xlm_cov_dirs : {xlm_cov_dirs}")
xlm_env = os.environ.copy()
xlm_env.update(xlm_cov_dirs)
logging.info(f"xlm_cov_dirs : {xlm_cov_dirs}")
# Dump the list of databases to a file, which will be read by the .tcl script
# (This prevents the argument list from getting too long when using lots of iterations)
with open(md.cov_merge_db_list, 'w') as fd:
# > The runs in <runfile> should be listed one per line.
fd.write(('\n'.join(str(d.parent) for d in cov_dbs))+'\n')
# First do the merge
md.dir_cov_merged.mkdir(exist_ok=True, parents=True)
@ -117,6 +131,7 @@ def merge_cov_xlm(md: RegressionMetadata, cov_dbs: Set[pathlib.Path]) -> int:
redirect_stdstreams=fd,
env=xlm_env)
if merge_ret:
logger.warning(f"WARNING: Saw non-zero retcode while merging coverage : logfile -> {md.cov_merge_stdout}")
return merge_ret
# Then do the reporting
@ -127,6 +142,8 @@ def merge_cov_xlm(md: RegressionMetadata, cov_dbs: Set[pathlib.Path]) -> int:
redirect_stdstreams=fd,
env=xlm_env)
if report_ret:
logger.warning(f"WARNING: Saw non-zero retcode while reporting coverage : logfile -> {trr.cov_report_stdout}")
return report_ret
@ -140,11 +157,21 @@ def main():
if md.simulator not in ['xlm', 'vcs']:
raise ValueError(f'Unsupported simulator for merging coverage: {args.simulator}')
if md.verbose:
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG) # root -> most-permissive
logger.setLevel(logging.INFO)
md.dir_cov.mkdir(exist_ok=True, parents=True)
# Compile a list of all the coverage databases
cov_dbs = find_cov_dbs(md.dir_run, md.simulator)
if not cov_dbs:
logger.error("No coverage databases found. Unable to continue.")
return 1
# Call the appropriate merge function for each tool, returning the retcode of
# the subprocess tasks.
merge_funs = {
'vcs': merge_cov_vcs,
'xlm': merge_cov_xlm

View file

@ -0,0 +1,48 @@
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import os
import subprocess
import pathlib3x as pathlib
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def patch_rpath(so: pathlib.Path):
"""Patch the rpath of the simulation executable to resolve stdenv.cc correctly."""
nix_gcc_lib_path = os.getenv("IBEX_NIX_SHELL_LIB")
# Get the old rpath
cmd = ["patchelf", "--print-rpath", str(so)]
old_rpath = subprocess.check_output(cmd).decode()
logger.warning(f"Old rpath : {old_rpath}")
# Add the nix gcc lib path to the head of the shared library's RPATH
new_rpath_str = f"{nix_gcc_lib_path}:{old_rpath}"
cmd = ["patchelf", "--set-rpath", new_rpath_str, str(so)]
new_rpath_output = subprocess.check_output(cmd).decode()
logger.warning(f"Output of --set-rpath : {new_rpath_output}")
# Print the new rpath
cmd = ["patchelf", "--print-rpath", str(so)]
new_rpath = subprocess.check_output(cmd).decode()
logger.warning(f"New rpath : {new_rpath}")
def patch_dtneeded(so: pathlib.Path):
"""Patch some stdenv.cc shared library deps of the simulation .so to be static."""
# We need to setup a couple of .so deps to be static, as the 'xrun' utility
# uses it's own search paths when discovering shared-library deps for the librun.so
# when simulating.
nix_gcc_lib_path = os.getenv("IBEX_NIX_SHELL_LIB")
cmd1 = ["patchelf", "--replace-needed", "libstdc++.so.6", f"{nix_gcc_lib_path}/libstdc++.so.6", str(so)]
cmd2 = ["patchelf", "--replace-needed", "libgcc_s.so.1", f"{nix_gcc_lib_path}/libgcc_s.so.1", str(so)]
subprocess.check_output(cmd1)
subprocess.check_output(cmd2)

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
_GET_OBJS=$(find ./out/run -type f -iregex '.*test\.o')
if [[ -z "${RISCV_TOOLCHAIN}" ]]; then
echo "Please define RISCV_TOOLCHAIN to have access to objdump."

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
_GET_TRACES=$(find . -type f -iregex '.*trace_core.*\.log')
for trace in $_GET_TRACES; do
column -t -s $'\t' -o ' ' -R 1,2,3,4,5 "$trace" > "$(dirname "$trace")"/trace_pretty.log

View file

@ -68,7 +68,7 @@ def get_cov_cmd(md: RegressionMetadata) -> List[str]:
'--dir', str(md.dir_run),
'-o', str(md.dir_fcov),
'--simulator', md.simulator,
'--opts', '--gen_timeout 1000',
'--opts', '--gen_timeout 1000 ',
'--isa', md.isa_ibex,
'--custom_target', str(md.ibex_riscvdv_customtarget)]
if md.verbose:

View file

@ -201,6 +201,7 @@ def _main() -> int:
gen_retcode = run_one(md.verbose, trr.riscvdv_run_gen_cmds[0],
redirect_stdstreams=trr.riscvdv_run_gen_stdout)
if gen_retcode:
logger.warning(f"WARNING: Saw non-zero retcode while generating riscv-dv commands : logfile -> {trr.riscvdv_run_gen_stdout}")
return gen_retcode
# Those commands assume the riscv-dv directory layout, where the build
@ -223,7 +224,8 @@ def _main() -> int:
ret = 0
for cmd in trr.riscvdv_run_cmds:
ret = run_one(md.verbose, cmd, redirect_stdstreams=log_fd)
if ret != 0:
if ret:
logger.warning(f"WARNING: Saw non-zero retcode while generating riscv-dv tests : logfile -> {trr.riscvdv_run_stdout}")
break
test_file_copies = {

View file

@ -31,6 +31,9 @@ def _main() -> int:
md = RegressionMetadata.construct_from_metadata_dir(args.dir_metadata)
trr = TestRunResult.construct_from_metadata_dir(args.dir_metadata, f"{tds[0]}.{tds[1]}")
if md.verbose:
logger.setLevel(logging.INFO)
if (trr.testtype == TestType.RISCVDV):
testopts = get_test_entry(trr.testname, md.ibex_riscvdv_testlist)
elif (trr.testtype == TestType.DIRECTED):
@ -62,6 +65,7 @@ def _main() -> int:
'seed': str(trr.seed),
'binary': trr.binary,
'test_dir': trr.dir_test,
'SIM_DIR': trr.dir_test, # Read by 'dv/uvm/core_ibex/vcs.tcl'
'tb_dir': md.dir_tb,
'dir_shared_cov': md.dir_shared_cov,
'rtl_sim_log': trr.rtl_log,
@ -99,12 +103,15 @@ def _main() -> int:
try:
for cmd in trr.rtl_cmds:
# Note that we don't capture the success or failure of the subprocess:
sim_fd.write(f"Running run-rtl command :\n{' '.join(cmd)}\n".encode())
run_one(md.verbose, cmd,
redirect_stdstreams=sim_fd,
timeout_s=md.run_rtl_timeout_s+60, # Ideally we time-out inside the simulation
reraise=True) # Allow us to catch timeout exceptions at this level
retcode = run_one(md.verbose, cmd,
redirect_stdstreams=sim_fd,
timeout_s=md.run_rtl_timeout_s+60, # Ideally we time-out inside the simulation
reraise=True) # Allow us to catch timeout exceptions at this level
# Note that we don't bail out if the retcode is not 0, this may just be a test-failure
# we wish to capture.
if retcode:
logger.warning(f"WARNING: Saw non-zero retcode while running simulation command : logfile -> {trr.rtl_stdout}")
except subprocess.TimeoutExpired:
trr.failure_mode = Failure_Modes.TIMEOUT
trr.failure_message = "[FAILURE] Simulation process killed due to timeout " \

View file

@ -10,17 +10,30 @@ import sys
import pickle
import yaml
import pathlib3x as pathlib
from io import IOBase
import io
from io import IOBase, TextIOBase, BufferedIOBase
from typing import Dict, TextIO, Optional, Union, List
from typing_utils import get_args
import dataclasses
from typeguard import typechecked
from textwrap import dedent
import logging
logger = logging.getLogger(__name__)
def write_str(dest: IOBase, mystr: str) -> None:
"""Write string to IO destination, taking encoding of the destination into account."""
if issubclass(type(dest), io.TextIOBase):
# We opened as text, so no need to encode the string
dest.write(mystr)
elif issubclass(type(dest), (io.RawIOBase, io.BufferedIOBase)):
# We opened in binary mode, so need to encode the string
dest.write(mystr.encode())
else:
logger.error("Failure to determine 'dest' type!")
@typechecked
def run_one(verbose: bool,
cmd: List[str],
@ -43,37 +56,64 @@ def run_one(verbose: bool,
if redirect_stdstreams is not None:
if redirect_stdstreams == '/dev/null':
# If this string is passed, we should discard all stdstream outputs.
stdstream_dest = subprocess.DEVNULL
elif isinstance(redirect_stdstreams, pathlib.Path):
# We've been passed a filepath where to direct the logs.
# This file should not already exist. We need to open it
# in binary-mode.
stdstream_dest = open(redirect_stdstreams, 'wb')
needs_closing = True
elif isinstance(redirect_stdstreams, IOBase):
# We've been passed an already-open handle to an IO object.
# If there is any outstanding, unflushed output pending, flush
# it before continuing. This ensures this previous output is
# placed at the top of the file.
stdstream_dest = redirect_stdstreams
stdstream_dest.flush()
else:
raise RuntimeError(
f"redirect_stdstream called as {redirect_stdstreams} "
f"'redirect_stdstreams' given as {redirect_stdstreams} "
f"but that argument is invalid.")
cmd_str = ' '.join(shlex.quote(w) for w in cmd)
# If verbose, print the command before running it (the equivalent of bash -x)
if verbose:
# The equivalent of bash -x
cmd_str = ' '.join(shlex.quote(w) for w in cmd)
# If we are redirecting the stdstreams, print out the command with a shell
# redirection at the end to help the reader understand where the outputs may
# be located. The commands are not invoked in this way, but equivalently using
# subprocess.run's arguments 'stdout' and 'stderr' to direct the output.
if redirect_stdstreams is not None:
if isinstance(redirect_stdstreams, str):
redir = f'>{shlex.quote(redirect_stdstreams)}'
if stdstream_dest == subprocess.DEVNULL:
redir = f'>/dev/null'
elif issubclass(type(redirect_stdstreams), io.StringIO):
# We are redirecting to an in-memory buffer, so printing a redirection
# is not applicable.
redir = ""
else:
redir = f'>>{shlex.quote(redirect_stdstreams.name)}'
# Append the equivalent shell redirection to the command string.
cmd_str = f'{cmd_str} {redir} 2>&1'
print('+ ' + cmd_str, file=sys.stderr)
cmd_str = f"+ {cmd_str}"
# Try to print the command to the file as well. This will fail if it's
# a binary file: ignore the failure.
if stdstream_dest:
try:
print('+ ' + cmd_str, file=stdstream_dest)
except (TypeError, AttributeError):
pass
# Print the command we are about to run to stderr
print((cmd_str + '\n'), file=sys.stderr)
# Print the command to the redirected location as well.
if redirect_stdstreams is not None:
mystr = dedent(f"""\
#-----------#
{cmd_str}
#-----------#
""")
write_str(stdstream_dest, mystr)
stdstream_dest.flush()
# Run the command
try:
# Passing close_fds=False ensures that if cmd is a call to Make then
# we'll pass through the jobserver fds. If you don't do this, you get a
@ -84,6 +124,14 @@ def run_one(verbose: bool,
close_fds=False,
timeout=timeout_s,
env=env)
stdstream_dest.flush()
mystr = dedent(f"""\
#-----------#
Retcode:{ps.returncode}
#-----------#
""")
write_str(stdstream_dest, mystr)
stdstream_dest.flush()
return ps.returncode
except subprocess.CalledProcessError:
print(ps.communicate()[0])
@ -93,7 +141,7 @@ def run_one(verbose: bool,
# print(ps.communicate()[0])
return(1)
except subprocess.TimeoutExpired as e:
print("Error: Timeout[{}s]: {}".format(timeout_s, cmd_str))
print("Error: Timeout[{}s]: {}".format(timeout_s, cmd))
if reraise:
raise e
else:

View file

@ -38,7 +38,7 @@
-debug_access+pp
-xlrm uniq_prior_final
-CFLAGS '--std=c99 -fno-extended-identifiers'
-lca -kdb
-lca
-debug_access+f
<cmp_opts> <wave_opts> <cov_opts> <cosim_opts>
cov_opts: >-
@ -62,17 +62,16 @@
sim:
cmd:
- >-
env SIM_DIR=<test_dir>
<tb_dir>/vcs_simv
+vcs+lic+wait
+ntb_random_seed=<seed>
+UVM_TESTNAME=<rtl_test>
+UVM_VERBOSITY=UVM_LOW
+bin=<binary>
+ibex_tracer_file_base=<rtl_trace>
+cosim_log_file=<iss_cosim_trace>
-l <rtl_sim_log>
<sim_opts> <wave_opts> <cov_opts>
<tb_dir>/vcs_simv
+vcs+lic+wait
+ntb_random_seed=<seed>
+UVM_TESTNAME=<rtl_test>
+UVM_VERBOSITY=UVM_LOW
+bin=<binary>
+ibex_tracer_file_base=<rtl_trace>
+cosim_log_file=<iss_cosim_trace>
-l <rtl_sim_log>
<sim_opts> <wave_opts> <cov_opts>
cov_opts: >
-cm line+tgl+assert+fsm+branch
-cm_dir <dir_shared_cov>/test.vdb

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
set -u
set -e

View file

@ -4,3 +4,6 @@
*.elf
*.vmem
*.d
benchmarks/coremark/*.map
benchmarks/coremark/*.log

View file

@ -9,7 +9,7 @@ int main(int argc, char **argv) {
pcount_reset();
pcount_enable(1);
puts("Hello simple system\n");
puts("Hello world from simple system!\n");
puthex(0xDEADBEEF);
putchar('\n');
puthex(0xBAADF00D);

230
flake.lock generated Normal file
View file

@ -0,0 +1,230 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"lowrisc-nix": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"poetry2nix": "poetry2nix"
},
"locked": {
"lastModified": 1711555471,
"narHash": "sha256-TSpMRZ09Bt3rwjcvMxn6yAzJ4OOvzSO4ngpEqNt0QBg=",
"owner": "lowRISC",
"repo": "lowrisc-nix",
"rev": "ad2aa6c4b91b1b29790b008531b14fed11d5151f",
"type": "github"
},
"original": {
"owner": "lowRISC",
"repo": "lowrisc-nix",
"type": "github"
}
},
"lowrisc-nix-private": {
"inputs": {
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1711624668,
"narHash": "sha256-h6t5XuwWT8aZhaj0IK4NIjJnU4U8IKjTIj3AUmNd7/E=",
"ref": "refs/heads/main",
"rev": "257480fc89f066f33ed33e907ef82955f87d96a5",
"revCount": 4,
"type": "git",
"url": "ssh://git@github.com/lowRISC/lowrisc-nix-private.git"
},
"original": {
"type": "git",
"url": "ssh://git@github.com/lowRISC/lowrisc-nix-private.git"
}
},
"nix-github-actions": {
"inputs": {
"nixpkgs": [
"lowrisc-nix",
"poetry2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1703863825,
"narHash": "sha256-rXwqjtwiGKJheXB43ybM8NwWB8rO2dSRrEqes0S7F5Y=",
"owner": "nix-community",
"repo": "nix-github-actions",
"rev": "5163432afc817cf8bd1f031418d1869e4c9d5547",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nix-github-actions",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1711460390,
"narHash": "sha256-akSgjDZL6pVHEfSE6sz1DNSXuYX6hq+P/1Z5IoYWs7E=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "44733514b72e732bd49f5511bd0203dea9b9a434",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1711460390,
"narHash": "sha256-akSgjDZL6pVHEfSE6sz1DNSXuYX6hq+P/1Z5IoYWs7E=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "44733514b72e732bd49f5511bd0203dea9b9a434",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"repo": "nixpkgs",
"type": "github"
}
},
"poetry2nix": {
"inputs": {
"flake-utils": [
"lowrisc-nix",
"flake-utils"
],
"nix-github-actions": "nix-github-actions",
"nixpkgs": [
"lowrisc-nix",
"nixpkgs"
],
"systems": "systems_2",
"treefmt-nix": "treefmt-nix"
},
"locked": {
"lastModified": 1708589824,
"narHash": "sha256-2GOiFTkvs5MtVF65sC78KNVxQSmsxtk0WmV1wJ9V2ck=",
"owner": "nix-community",
"repo": "poetry2nix",
"rev": "3c92540611f42d3fb2d0d084a6c694cd6544b609",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "poetry2nix",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": [
"lowrisc-nix",
"flake-utils"
],
"lowrisc-nix": "lowrisc-nix",
"lowrisc-nix-private": "lowrisc-nix-private",
"nixpkgs": [
"lowrisc-nix",
"nixpkgs"
],
"poetry2nix": [
"lowrisc-nix",
"poetry2nix"
],
"sv2v": "sv2v"
}
},
"sv2v": {
"flake": false,
"locked": {
"lastModified": 1710128701,
"narHash": "sha256-Az8QeL+sDG+GbqDVH1o5E31GkZ2xE151UTXS1hAps2k=",
"owner": "zachjs",
"repo": "sv2v",
"rev": "df01650444dca89981e866ccc9985ff8b1246a21",
"type": "github"
},
"original": {
"owner": "zachjs",
"repo": "sv2v",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"id": "systems",
"type": "indirect"
}
},
"treefmt-nix": {
"inputs": {
"nixpkgs": [
"lowrisc-nix",
"poetry2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1708335038,
"narHash": "sha256-ETLZNFBVCabo7lJrpjD6cAbnE11eDOjaQnznmg/6hAE=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "e504621290a1fd896631ddbc5e9c16f4366c9f65",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "treefmt-nix",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

140
flake.nix Normal file
View file

@ -0,0 +1,140 @@
{
description = "Environment for developing and simulating the ibex core.";
inputs = {
# The input 'lowrisc-nix' contains some common dependencies that can be used
# by lowRISC projects. There is also an associated public binary cache.
lowrisc-nix.url = "github:lowRISC/lowrisc-nix";
# The input 'lowrisc-nix-private' is access-controlled.
# Outputs which depend on this input are for internal use only, and will fail
# to evaluate without the appropriate credentials.
# All outputs which depend on this input are suffixed '_lowrisc'
lowrisc-nix-private.url = "git+ssh://git@github.com/lowRISC/lowrisc-nix-private.git";
# Follow the lowrisc-nix pins for the shared inputs so all lowRISC
# projects resolve the same nixpkgs/flake-utils/poetry2nix revisions.
nixpkgs.follows = "lowrisc-nix/nixpkgs";
flake-utils.follows = "lowrisc-nix/flake-utils";
poetry2nix.follows = "lowrisc-nix/poetry2nix";
# sv2v is consumed as plain sources (flake = false); it is built locally
# by ./nix/sv2v.nix.
sv2v = {
url = "github:zachjs/sv2v";
flake = false;
};
};
# Outputs are defined for one hard-coded system only; there is no
# flake-utils eachSystem loop here.
outputs = inputs: let
# Only tested with the following systems:
# - x86_64-linux
system = inputs.flake-utils.lib.system.x86_64-linux;
pkgs = import inputs.nixpkgs {
inherit system;
config = {
allowUnfree = true;
allowBroken = true; # sv2v marked as broken.
};
};
# This import creates internal-use only outputs, which build on
# input attributes that cannot be fetched without appropriate credentials.
lr = import ./nix/lowrisc.nix {
inherit inputs pkgs system;
extraDependencies = sim_shared_lib_deps;
};
################
# DEPENDENCIES #
################
# Python environment, defined in ./nix/env/pyproject.toml
pythonEnv = import ./nix/env {inherit inputs pkgs;};
# lowRISC fork of Spike used as a cosimulation model for Ibex Verification
spike = inputs.lowrisc-nix.packages.${system}.spike-ibex-cosim;
# Currently we don't build the riscv-toolchain from src, we use a github release
# See https://github.com/lowRISC/lowrisc-nix/blob/main/pkgs/lowrisc-toolchain-gcc-rv32imcb.nix
rv32imcb_toolchain = inputs.lowrisc-nix.packages.${system}.lowrisc-toolchain-gcc-rv32imcb;
# Libraries needed at run time by the simulation binaries.
ibex_runtime_deps = with pkgs; [
libelf # Used in DPI code
zlib # Verilator run-time dep
];
# Also handed to ./nix/lowrisc.nix as 'extraDependencies' (see above), so
# the wrapped EDA environments can see the same libraries.
sim_shared_lib_deps = with pkgs; [
elfutils
openssl
];
# Build-time tools and libraries common to all development shells below.
ibex_project_deps =
[
pythonEnv
spike
rv32imcb_toolchain
] ++
sim_shared_lib_deps ++
(with pkgs; [
# Tools
cmake
pkg-config
# Applications
verilator
gtkwave
# Libraries
srecord
]);
# Synthesis-flow dependencies and profile (sv2v/yosys/openroad), see ./nix/syn.nix.
ibex_syn = import ./nix/syn.nix {inherit inputs pkgs;};
################
# ENVIRONMENTS #
################
# These exports are required by scripts within the Ibex DV flow.
ibex_profile_common = ''
export SPIKE_PATH=${spike}/bin
export RISCV_TOOLCHAIN=${rv32imcb_toolchain}
export RISCV_GCC=${rv32imcb_toolchain}/bin/riscv32-unknown-elf-gcc
export RISCV_OBJCOPY=${rv32imcb_toolchain}/bin/riscv32-unknown-elf-objcopy
'';
# Default development shell. makeOverridable lets the variants below
# (syn_shell, and eda_shell_lowrisc in ./nix/lowrisc.nix) extend it via
# .override.
shell = pkgs.lib.makeOverridable pkgs.mkShell {
name = "ibex-devshell";
buildInputs = ibex_runtime_deps;
nativeBuildInputs = ibex_project_deps;
shellHook = ''
# Unset these environment variables provided by stdenv, as the SS makefiles will not
# be able to discover the riscv toolchain versions otherwise.
unset CC OBJCOPY OBJDUMP
${ibex_profile_common}
'';
};
# This shell uses mkShellNoCC as the stdenv CC can interfere with EDA tools.
eda_shell = pkgs.lib.makeOverridable pkgs.mkShellNoCC {
name = "ibex-devshell-eda";
buildInputs = ibex_runtime_deps;
nativeBuildInputs = ibex_project_deps;
shellHook = ''
${ibex_profile_common}
'';
};
# Synthesis shell: the default shell extended with the synthesis deps and
# environment exports from ./nix/syn.nix.
syn_shell = shell.override (prev: {
name = "ibex-devshell-synthesis";
nativeBuildInputs = prev.nativeBuildInputs ++ ibex_syn.deps;
shellHook = prev.shellHook + ibex_syn.profile;
});
# Exported shells; 'default' is an alias for 'shell'. The access-controlled
# internal shells from ./nix/lowrisc.nix (lr.devShells) are merged in.
in {
devShells.${system} = {
default = inputs.self.devShells.${system}.shell;
inherit shell;
inherit eda_shell;
inherit syn_shell;
} // lr.devShells;
};
}

21
nix/env/default.nix vendored Normal file
View file

@ -0,0 +1,21 @@
# Copyright lowRISC Contributors.
# Licensed under the MIT License, see LICENSE for details.
# SPDX-License-Identifier: MIT
#
# Builds the Python environment used by the Ibex development shells from the
# Poetry project in this directory (./pyproject.toml + ./poetry.lock), via
# poetry2nix's mkPoetryEnv.
{
inputs,
pkgs,
...
}: let
poetry2nix = inputs.poetry2nix.lib.mkPoetry2Nix {inherit pkgs;};
# Local per-package fixes (missing build requirements, wheel preference).
ibexPoetryOverrides = import ./poetryOverrides.nix {inherit pkgs;};
# Shared overrides maintained in the lowrisc-nix repository.
lowriscPoetryOverrides = inputs.lowrisc-nix.lib.poetryOverrides {inherit pkgs;};
in
poetry2nix.mkPoetryEnv {
projectDir = ./.;
# NOTE(review): assumes the list order here matches poetry2nix's expected
# override layering — confirm against the poetry2nix documentation.
overrides = [
ibexPoetryOverrides
lowriscPoetryOverrides
poetry2nix.defaultPoetryOverrides
];
}

2205
nix/env/poetry.lock generated vendored Normal file

File diff suppressed because it is too large Load diff

45
nix/env/poetryOverrides.nix vendored Normal file
View file

@ -0,0 +1,45 @@
# Copyright lowRISC Contributors.
# Licensed under the MIT License, see LICENSE for details.
# SPDX-License-Identifier: MIT
#
# Python package fixes applied on top of poetry2nix, composed from two
# overlays:
#   1. buildreqs-overlay: sdists whose build backend (setuptools/flit-core)
#      is not declared get it added to buildInputs.
#   2. preferwheel-overlay: pyboolector is taken as a prebuilt wheel instead
#      of being built from source.
{pkgs, ...}:
let
pypkgs-missing-build-requirements = {
# package: build-requirements #
alabaster = ["flit-core"];
# pyboolector = ["setuptools"];
lib-detect-testenv = ["setuptools"];
cli-exit-tools = ["setuptools"];
pathlib3x = ["setuptools"];
typing-utils = ["setuptools"];
svg-py = ["flit-core"];
python-jsonschema-objects = ["setuptools"];
sphinx-issues = ["setuptools"];
sphinxcontrib-log-cabinet = ["setuptools"];
};
# For every entry in the table above, append the named build requirements
# (resolved from the final package set) to that package's buildInputs.
buildreqs-overlay = (
final: prev:
builtins.mapAttrs (
package: build-requirements:
(builtins.getAttr package prev).overridePythonAttrs (old: {
buildInputs =
(old.buildInputs or [])
++ (
builtins.map
(pkg: builtins.getAttr pkg final)
build-requirements
);
})
)
pypkgs-missing-build-requirements
);
preferwheel-overlay = final: prev: {
pyboolector = prev.pyboolector.override { # missing "setuptools"
preferWheel = true;
};
};
in
# NOTE(review): composition order — preferwheel-overlay is listed first;
# confirm this matches the intended precedence for composeManyExtensions.
pkgs.lib.composeManyExtensions [
preferwheel-overlay
buildreqs-overlay
]

47
nix/env/pyproject.toml vendored Normal file
View file

@ -0,0 +1,47 @@
# Poetry project definition for the Ibex development Python environment.
# Consumed by poetry2nix (see ./default.nix); the lockfile (./poetry.lock)
# pins the resolved versions.
#
# NOTE(review): the metadata below (name "opentitan", description
# "root-of-trust") appears to be copied from the OpenTitan project — confirm
# whether it should instead describe Ibex.
[tool.poetry]
name = "opentitan"
version = "0.1.0"
description = "Open-source hardware root-of-trust"
authors = []
license = "Apache 2.0"
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.10"
packaging = "23.1"
anytree = "2.8.0"
hjson = "3.1.0"
mako = "1.1.6"
pyyaml = "6.0"
wheel = "0.41.2"
# lowRISC forks of edalize/fusesoc, pinned by tag.
edalize = { git = "https://github.com/lowRISC/edalize.git", tag = "v0.4.0"}
fusesoc = { git = "https://github.com/lowRISC/fusesoc.git", tag = "ot-0.4" }
# Update this when poetry2nix adds hash of later version
rpds-py = "0.13.2"
libcst = "^1.1.0"
isort = "^5.13.2"
junit-xml = "^1.9"
pathlib3x = "^2.0.3"
typing-utils = "^0.1.0"
typeguard = "^4.1.5"
portalocker = "^2.8.2"
pydantic = "^2.5.3"
svg-py = "^1.4.2"
mistletoe = "^1.2.1"
premailer = "^3.10.0"
bitstring = "^4.1.4"
sphinx = "^7.2.6"
pallets-sphinx-themes = "^2.1.1"
sphinxcontrib-log-cabinet = "^1.0.1"
sphinx-issues = "^3.0.1"
sphinx-rtd-theme = "^2.0.0"
rst2pdf = "^0.101"
flake8 = "^7.0.0"
pyvsc = "^0.8.8.7162283399"
tabulate = "^0.9.0"
pandas = "^2.1.4"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

48
nix/lowrisc.nix Normal file
View file

@ -0,0 +1,48 @@
# Copyright lowRISC Contributors.
# Licensed under the MIT License, see LICENSE for details.
# SPDX-License-Identifier: MIT
#
# Internal-use only devShells that depend on the access-controlled
# 'lowrisc-nix-private' input (proprietary EDA tooling); evaluation fails
# without the appropriate credentials.
{
inputs,
pkgs,
system,
# Extra packages to make visible inside the wrapped EDA tool environments.
extraDependencies,
...
}: let
# Proprietary EDA deps
# These dependencies are behind the 'lowrisc-nix-private' repository, which is access-controlled.
lowriscEdaDeps = (
map (pkg: pkg.override {
# The 'extraDependencies' argument can be used to add deps to the wrapped environments the
# EDA tools run inside. Just adding the deps to the devShell environments is not sufficient, as
# the EDA tool wrappers end up shadowing the same paths with their own wrappers, and hence cannot
# see the additional deps.
inherit extraDependencies;
})
(with inputs.lowrisc-nix-private.packages.${system}; [vcs xcelium])
);
lowriscProfile = ''
# Xcelium
# When building the simulation executable with the EDA tooling wrapped in an FHSenv, we are
# depending on the stdenv.cc. Therefore, the appropriate shared libraries need to be
# located at runtime for these executables to run. The rpath is not set correctly for us to
# discover the correct libraries, and it does not appear to matter as when invoking the simulator
# the search paths of the xrun utility are used, not those of the librun.so library.
# However, setting the DT_NEEDED paths to be static/absolute does resolve correctly.
# Therefore, pass the correct search paths into the build here, and patchelf the librun.so object
# to setup DT_NEEDED correctly (in compile_tb.py) for the appropriate libs (libstdc++ / libgcc_s)
export IBEX_NIX_SHELL_LIB=${pkgs.stdenv.cc.cc.lib}/lib
'';
# The access-controlled EDA shell: the public 'eda_shell' output extended
# with the proprietary simulators and the Xcelium library-path export above.
eda_shell_lowrisc = inputs.self.devShells.${system}.eda_shell.override (prev: {
name = "ibex-devshell-eda-lowrisc";
nativeBuildInputs = prev.nativeBuildInputs ++ lowriscEdaDeps;
shellHook = prev.shellHook + lowriscProfile;
});
# Plain attrset (was 'rec'): no attribute references a sibling, so the
# recursive set was unnecessary.
in {
devShells = {
inherit eda_shell_lowrisc;
};
}

35
nix/sv2v.nix Normal file
View file

@ -0,0 +1,35 @@
# Copyright lowRISC Contributors.
# Licensed under the MIT License, see LICENSE for details.
# SPDX-License-Identifier: MIT
#
# Builds sv2v from the (non-flake) 'sv2v' input sources with cabal2nix,
# since the nixpkgs haskellPackages.sv2v is marked broken (see ./syn.nix).
# Taken from:
# https://github.com/deemp/gists/blob/master/haskellPackage/flake.nix
# (I don't know anything about Haskell packaging.)
{
inputs,
pkgs,
...
}: let
packageName = "sv2v";
# GHC 9.2.8 package set from nixpkgs ("ghc928").
ghcVersion = "928";
inherit (pkgs.haskell.lib) overrideCabal justStaticExecutables;
hpkgs = pkgs.haskell.packages."ghc${ghcVersion}";
# executableToolDepends - from "sv2v" expression in https://raw.githubusercontent.com/NixOS/nixpkgs/nixos-unstable/pkgs/development/haskell-modules/hackage-packages.nix
package = overrideCabal (hpkgs.callCabal2nix packageName inputs.sv2v.outPath { })
(x: {
executableToolDepends = (x.executableToolDepends or [ ]) ++ (
# Add the following extra dependencies
with pkgs; [
# alex/happy are the lexer/parser generators sv2v needs at build time.
alex
happy
]
);
});
in {
# 'default' keeps just the executables (justStaticExecutables trims the
# Haskell library/doc outputs); 'package' is the full derivation.
default = justStaticExecutables package;
inherit package;
}

52
nix/syn.nix Normal file
View file

@ -0,0 +1,52 @@
# Copyright lowRISC Contributors.
# Licensed under the MIT License, see LICENSE for details.
# SPDX-License-Identifier: MIT
# Deps for Ibex synthesis jobs
#
# Returns { deps, profile }: the tool packages and the shellHook exports that
# flake.nix splices into the synthesis devShell.
{
inputs,
pkgs,
...
}: let
# sv2v built from the flake's pinned sources (see ./sv2v.nix); the nixpkgs
# package is marked broken.
sv2v_local = import ./sv2v.nix {inherit inputs pkgs;};
ibex_syn_deps = [
sv2v_local.default
] ++ (with pkgs; [
# haskellPackages.sv2v # broken
yosys
openroad
]);
# Create a dumb package of nangate45
# > All we need is a path to the sources
nangate45 = pkgs.stdenv.mkDerivation rec {
pname = "openroad-nangate45";
version = "PDKv1.3_v2010_12.Apache.CCL";
src = pkgs.fetchFromGitHub {
owner = "The-OpenROAD-Project";
repo = "OpenROAD-flow-scripts";
rev = "181e9133776117ea1b9f74dbacbfdaadff8c331b"; # Tag: v3.0
hash = "sha256-fYAdhBsMcuCXmPMQVCRdm75Tk0rd9zLnLfJdjhnhC00=";
};
# Only the nangate45 platform subtree is kept.
sourceRoot = "${src.name}/flow/platforms/nangate45";
# Skip configure/build/fixup: this derivation just copies sources.
phases = [ "unpackPhase" "installPhase" ];
installPhase = ''
mkdir -p $out
cp -r ./* $out
'';
};
# Environment consumed by the lowRISC synthesis scripts.
# NOTE(review): the .lib path assumes the nangate45 layout of
# OpenROAD-flow-scripts v3.0 — revisit if the pin above changes.
ibex_syn_profile = ''
export LR_SYNTH_CELL_LIBRARY_NAME=nangate
export LR_SYNTH_CELL_LIBRARY_PATH=${nangate45}/lib/NangateOpenCellLibrary_typical.lib
'';
in {
deps = ibex_syn_deps;
profile = ibex_syn_profile;
}

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.

View file

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.

View file

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.

View file

@ -1,4 +1,4 @@
#!/usr/bin/python3
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.

2
syn/syn_setup.example.sh Normal file → Executable file
View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.

19
syn/syn_setup.sh Executable file
View file

@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

# Set up the environment for a synthesis run. An optional single argument
# names the output directory; with no argument, a timestamped directory
# under syn_out/ is used. Any other argument count is an error.
case $# in
  1)
    export LR_SYNTH_OUT_DIR=$1
    ;;
  0)
    export LR_SYNTH_OUT_DIR_PREFIX="syn_out/ibex"
    # Assign and export separately so a failure of date isn't masked.
    LR_SYNTH_OUT_DIR=$(date +"${LR_SYNTH_OUT_DIR_PREFIX}_%d_%m_%Y_%H_%M_%S")
    export LR_SYNTH_OUT_DIR
    ;;
  *)
    echo "Usage $0 [synth_out_dir]"
    exit 1
    ;;
esac

export LR_SYNTH_TIMING_RUN=1
export LR_SYNTH_FLATTEN=1

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Check the return code of run.py in order to give a pass/fail indication to Metrics
echo "run.py return code:" $1

View file

@ -106,7 +106,7 @@ def run_cmd(cmd, timeout_s=999, exit_on_error=1, check_return_code=True,
try:
ps = subprocess.Popen("exec " + cmd,
shell=True,
executable='/bin/bash',
executable='bash',
universal_newlines=True,
start_new_session=True,
env=os.environ,
@ -157,7 +157,7 @@ def run_parallel_cmd(cmd_list, timeout_s=999, exit_on_error=0,
for cmd in cmd_list:
ps = subprocess.Popen("exec " + cmd,
shell=True,
executable='/bin/bash',
executable='bash',
universal_newlines=True,
start_new_session=True,
stdout=subprocess.PIPE,

View file

@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
#
# Copyright 2019 Google LLC
#